diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json index 13025150393..4f48fa652a5 100644 --- a/src/datafactory/azext_datafactory/azext_metadata.json +++ b/src/datafactory/azext_datafactory/azext_metadata.json @@ -1,4 +1,4 @@ { "azext.isExperimental": true, - "azext.minCliCoreVersion": "2.3.1" + "azext.minCliCoreVersion": "2.11.0" } \ No newline at end of file diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index d4c212fbd12..f42e472af34 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -19,28 +19,28 @@ helps['datafactory factory list'] = """ type: command - short-summary: Lists factories under the specified subscription. + short-summary: "Lists factories under the specified subscription." examples: - name: Factories_ListByResourceGroup text: |- - az datafactory factory list --resource-group "myResourceGroup" + az datafactory factory list --resource-group "exampleResourceGroup" """ helps['datafactory factory show'] = """ type: command - short-summary: Gets a factory. + short-summary: "Gets a factory." examples: - name: Factories_Get text: |- - az datafactory factory show --name "myFactoryName" --resource-group "myResourceGroup" + az datafactory factory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory factory create'] = """ type: command - short-summary: Creates or updates a factory. + short-summary: "Creates or updates a factory." parameters: - name: --factory-vsts-configuration - short-summary: Factory's VSTS repo information. + short-summary: "Factory's VSTS repo information." long-summary: | Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \ repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX @@ -54,7 +54,7 @@ root-folder: Required. Root folder. last-commit-id: Last commit id. - name: --factory-git-hub-configuration - short-summary: Factory's GitHub repo information. + short-summary: "Factory's GitHub repo information." long-summary: | Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \ collaboration-branch=XX root-folder=XX last-commit-id=XX @@ -69,35 +69,35 @@ examples: - name: Factories_CreateOrUpdate text: |- - az datafactory factory create --location "East US" --name "myFactoryName" --resource-group \ -"myResourceGroup" + az datafactory factory create --location "East US" --name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" """ helps['datafactory factory update'] = """ type: command - short-summary: Updates a factory. + short-summary: "Updates a factory." examples: - name: Factories_Update text: |- - az datafactory factory update --name "myFactoryName" --tags exampleTag="exampleValue" --resource-group \ -"myResourceGroup" + az datafactory factory update --name "exampleFactoryName" --tags exampleTag="exampleValue" \ +--resource-group "exampleResourceGroup" """ helps['datafactory factory delete'] = """ type: command - short-summary: Deletes a factory. + short-summary: "Deletes a factory." 
examples: - name: Factories_Delete text: |- - az datafactory factory delete --name "myFactoryName" --resource-group "myResourceGroup" + az datafactory factory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory factory configure-factory-repo'] = """ type: command - short-summary: Updates a factory's repo information. + short-summary: "Updates a factory's repo information." parameters: - name: --factory-vsts-configuration - short-summary: Factory's VSTS repo information. + short-summary: "Factory's VSTS repo information." long-summary: | Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \ repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX @@ -111,7 +111,7 @@ root-folder: Required. Root folder. last-commit-id: Last commit id. - name: --factory-git-hub-configuration - short-summary: Factory's GitHub repo information. + short-summary: "Factory's GitHub repo information." long-summary: | Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \ collaboration-branch=XX root-folder=XX last-commit-id=XX @@ -127,30 +127,30 @@ - name: Factories_ConfigureFactoryRepo text: |- az datafactory factory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1\ -234-12345678abc/resourceGroups/myResourceGroup/providers/Microsoft.DataFactory/factories/myFactoryName" \ +234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ --factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ repository-name="repo" root-folder="/" tenant-id="" --location "East US" """ helps['datafactory factory get-data-plane-access'] = """ type: command - short-summary: Get Data Plane access. + short-summary: "Get Data Plane access." examples: - name: Factories_GetDataPlaneAccess text: |- - az datafactory factory get-data-plane-access --name "myFactoryName" --access-resource-path "" \ + az datafactory factory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" \ --expire-time "2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ -"2018-11-10T02:46:20.2659347Z" --resource-group "myResourceGroup" +"2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" """ helps['datafactory factory get-git-hub-access-token'] = """ type: command - short-summary: Get GitHub Access Token. + short-summary: "Get GitHub Access Token." examples: - name: Factories_GetGitHubAccessToken text: |- - az datafactory factory get-git-hub-access-token --name "myFactoryName" --git-hub-access-code "some" \ ---git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "myResourceGroup" + az datafactory factory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code \ +"some" --git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime'] = """ @@ -160,22 +160,22 @@ helps['datafactory integration-runtime list'] = """ type: command - short-summary: Lists integration runtimes. + short-summary: "Lists integration runtimes." 
examples: - name: IntegrationRuntimes_ListByFactory text: |- - az datafactory integration-runtime list --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" + az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" """ helps['datafactory integration-runtime show'] = """ type: command - short-summary: Gets an integration runtime. + short-summary: "Gets an integration runtime." examples: - name: IntegrationRuntimes_Get text: |- - az datafactory integration-runtime show --factory-name "myFactoryName" --name "myIntegrationRuntime" \ ---resource-group "myResourceGroup" + az datafactory integration-runtime show --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime linked-integration-runtime'] = """ @@ -185,14 +185,15 @@ helps['datafactory integration-runtime linked-integration-runtime create'] = """ type: command - short-summary: Create a linked integration runtime entry in a shared integration runtime. + short-summary: "Create a linked integration runtime entry in a shared integration runtime." examples: - name: IntegrationRuntimes_CreateLinkedIntegrationRuntime text: |- az datafactory integration-runtime linked-integration-runtime create --name \ "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" --location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" \ ---subscription-id "061774c7-4b5a-4159-a55b-365581830283" --factory-name "myFactoryName" --integration-runtime-name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" --subscription-id "12345678-1234-1234-1234-12345678abc" +--subscription-id "061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" \ +--integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --subscription-id \ +"12345678-1234-1234-1234-12345678abc" """ helps['datafactory integration-runtime managed'] = """ @@ -202,12 +203,7 @@ helps['datafactory integration-runtime managed create'] = """ type: command - short-summary: Creates or updates an integration runtime. - examples: - - name: IntegrationRuntimes_Create - text: |- - az datafactory integration-runtime managed create --factory-name "myFactoryName" --description "A \ -selfhosted integration runtime" --name "myIntegrationRuntime" --resource-group "myResourceGroup" + short-summary: "Creates or updates an integration runtime." """ helps['datafactory integration-runtime self-hosted'] = """ @@ -217,138 +213,139 @@ helps['datafactory integration-runtime self-hosted create'] = """ type: command - short-summary: Creates or updates an integration runtime. + short-summary: "Creates or updates an integration runtime." examples: - name: IntegrationRuntimes_Create text: |- - az datafactory integration-runtime self-hosted create --factory-name "myFactoryName" --description "A \ -selfhosted integration runtime" --name "myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description \ +"A selfhosted integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime update'] = """ type: command - short-summary: Updates an integration runtime. + short-summary: "Updates an integration runtime." 
examples: - name: IntegrationRuntimes_Update text: |- - az datafactory integration-runtime update --factory-name "myFactoryName" --name "myIntegrationRuntime" \ ---resource-group "myResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" + az datafactory integration-runtime update --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset \ +"\\"PT3H\\"" """ helps['datafactory integration-runtime delete'] = """ type: command - short-summary: Deletes an integration runtime. + short-summary: "Deletes an integration runtime." examples: - name: IntegrationRuntimes_Delete text: |- - az datafactory integration-runtime delete --factory-name "myFactoryName" --name "myIntegrationRuntime" \ ---resource-group "myResourceGroup" + az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime get-connection-info'] = """ type: command - short-summary: Gets the on-premises integration runtime connection information for encrypting the on-premises data \ -source credentials. + short-summary: "Gets the on-premises integration runtime connection information for encrypting the on-premises \ +data source credentials." examples: - name: IntegrationRuntimes_GetConnectionInfo text: |- - az datafactory integration-runtime get-connection-info --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime get-monitoring-data'] = """ type: command - short-summary: Get the integration runtime monitoring data, which includes the monitor data for all the nodes \ -under this integration runtime. + short-summary: "Get the integration runtime monitoring data, which includes the monitor data for all the nodes \ +under this integration runtime." examples: - name: IntegrationRuntimes_GetMonitoringData text: |- - az datafactory integration-runtime get-monitoring-data --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime get-status'] = """ type: command - short-summary: Gets detailed status information for an integration runtime. + short-summary: "Gets detailed status information for an integration runtime." examples: - name: IntegrationRuntimes_GetStatus text: |- - az datafactory integration-runtime get-status --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime list-auth-key'] = """ type: command - short-summary: Retrieves the authentication keys for an integration runtime. + short-summary: "Retrieves the authentication keys for an integration runtime." 
examples: - name: IntegrationRuntimes_ListAuthKeys text: |- - az datafactory integration-runtime list-auth-key --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime regenerate-auth-key'] = """ type: command - short-summary: Regenerates the authentication key for an integration runtime. + short-summary: "Regenerates the authentication key for an integration runtime." examples: - name: IntegrationRuntimes_RegenerateAuthKey text: |- - az datafactory integration-runtime regenerate-auth-key --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --key-name "authKey2" --resource-group "myResourceGroup" + az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime remove-link'] = """ type: command - short-summary: Remove all linked integration runtimes under specific data factory in a self-hosted integration \ -runtime. + short-summary: "Remove all linked integration runtimes under specific data factory in a self-hosted integration \ +runtime." examples: - name: IntegrationRuntimes_Upgrade text: |- - az datafactory integration-runtime remove-link --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --linked-factory-name "myFactoryName-linked" --resource-group "myResourceGroup" + az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime start'] = """ type: command - short-summary: Starts a ManagedReserved type integration runtime. + short-summary: "Starts a ManagedReserved type integration runtime." examples: - name: IntegrationRuntimes_Start text: |- - az datafactory integration-runtime start --factory-name "myFactoryName" --name \ -"myManagedIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime start --factory-name "exampleFactoryName" --name \ +"exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime stop'] = """ type: command - short-summary: Stops a ManagedReserved type integration runtime. + short-summary: "Stops a ManagedReserved type integration runtime." examples: - name: IntegrationRuntimes_Stop text: |- - az datafactory integration-runtime stop --factory-name "myFactoryName" --name \ -"myManagedIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name \ +"exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime sync-credentials'] = """ type: command - short-summary: Force the integration runtime to synchronize credentials across integration runtime nodes, and this \ -will override the credentials across all worker nodes with those available on the dispatcher node. If you already have \ -the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime \ -node than using this API directly. 
+ short-summary: "Force the integration runtime to synchronize credentials across integration runtime nodes, and \ +this will override the credentials across all worker nodes with those available on the dispatcher node. If you already \ +have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration \ +runtime node than using this API directly." examples: - name: IntegrationRuntimes_SyncCredentials text: |- - az datafactory integration-runtime sync-credentials --factory-name "myFactoryName" --name \ -"myIntegrationRuntime" --resource-group "myResourceGroup" + az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime upgrade'] = """ type: command - short-summary: Upgrade self-hosted integration runtime to latest version if availability. + short-summary: "Upgrade self-hosted integration runtime to latest version if availability." examples: - name: IntegrationRuntimes_Upgrade text: |- - az datafactory integration-runtime upgrade --factory-name "myFactoryName" --name "myIntegrationRuntime" \ ---resource-group "myResourceGroup" + az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime wait'] = """ @@ -358,8 +355,8 @@ - name: Pause executing next line of CLI script until the datafactory integration-runtime is successfully \ created. text: |- - az datafactory integration-runtime wait --factory-name "myFactoryName" --name "myIntegrationRuntime" \ ---resource-group "myResourceGroup" --created + az datafactory integration-runtime wait --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --created """ helps['datafactory integration-runtime-node'] = """ @@ -369,43 +366,43 @@ helps['datafactory integration-runtime-node show'] = """ type: command - short-summary: Gets a self-hosted integration runtime node. + short-summary: "Gets a self-hosted integration runtime node." examples: - name: IntegrationRuntimeNodes_Get text: |- - az datafactory integration-runtime-node show --factory-name "myFactoryName" --integration-runtime-name \ -"myIntegrationRuntime" --node-name "Node_1" --resource-group "myResourceGroup" + az datafactory integration-runtime-node show --factory-name "exampleFactoryName" \ +--integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime-node update'] = """ type: command - short-summary: Updates a self-hosted integration runtime node. + short-summary: "Updates a self-hosted integration runtime node." examples: - name: IntegrationRuntimeNodes_Update text: |- - az datafactory integration-runtime-node update --factory-name "myFactoryName" \ ---integration-runtime-name "myIntegrationRuntime" --node-name "Node_1" --resource-group "myResourceGroup" \ + az datafactory integration-runtime-node update --factory-name "exampleFactoryName" \ +--integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" \ --concurrent-jobs-limit 2 """ helps['datafactory integration-runtime-node delete'] = """ type: command - short-summary: Deletes a self-hosted integration runtime node. + short-summary: "Deletes a self-hosted integration runtime node." 
examples: - name: IntegrationRuntimesNodes_Delete text: |- - az datafactory integration-runtime-node delete --factory-name "myFactoryName" \ ---integration-runtime-name "myIntegrationRuntime" --node-name "Node_1" --resource-group "myResourceGroup" + az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" \ +--integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime-node get-ip-address'] = """ type: command - short-summary: Get the IP address of self-hosted integration runtime node. + short-summary: "Get the IP address of self-hosted integration runtime node." examples: - name: IntegrationRuntimeNodes_GetIpAddress text: |- - az datafactory integration-runtime-node get-ip-address --factory-name "myFactoryName" \ ---integration-runtime-name "myIntegrationRuntime" --node-name "Node_1" --resource-group "myResourceGroup" + az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" \ +--integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ helps['datafactory linked-service'] = """ @@ -415,43 +412,54 @@ helps['datafactory linked-service list'] = """ type: command - short-summary: Lists linked services. + short-summary: "Lists linked services." examples: - name: LinkedServices_ListByFactory text: |- - az datafactory linked-service list --factory-name "myFactoryName" --resource-group "myResourceGroup" + az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" """ helps['datafactory linked-service show'] = """ type: command - short-summary: Gets a linked service. + short-summary: "Gets a linked service." examples: - name: LinkedServices_Get text: |- - az datafactory linked-service show --factory-name "myFactoryName" --name "myLinkedService" \ ---resource-group "myResourceGroup" + az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" """ helps['datafactory linked-service create'] = """ type: command - short-summary: Creates or updates a linked service. + short-summary: "Creates or updates a linked service." examples: - name: LinkedServices_Create text: |- - az datafactory linked-service create --factory-name "myFactoryName" --properties \ + az datafactory linked-service create --factory-name "exampleFactoryName" --properties \ "{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\"\ :\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name \ -"myLinkedService" --resource-group "myResourceGroup" +"exampleLinkedService" --resource-group "exampleResourceGroup" +""" + +helps['datafactory linked-service update'] = """ + type: command + short-summary: "Creates or updates a linked service." + examples: + - name: LinkedServices_Update + text: |- + az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example \ +description" --name "exampleLinkedService" --resource-group "exampleResourceGroup" """ helps['datafactory linked-service delete'] = """ type: command - short-summary: Deletes a linked service. + short-summary: "Deletes a linked service." 
examples: - name: LinkedServices_Delete text: |- - az datafactory linked-service delete --factory-name "myFactoryName" --name "myLinkedService" \ ---resource-group "myResourceGroup" + az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" """ helps['datafactory dataset'] = """ @@ -461,45 +469,64 @@ helps['datafactory dataset list'] = """ type: command - short-summary: Lists datasets. + short-summary: "Lists datasets." examples: - name: Datasets_ListByFactory text: |- - az datafactory dataset list --factory-name "myFactoryName" --resource-group "myResourceGroup" + az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory dataset show'] = """ type: command - short-summary: Gets a dataset. + short-summary: "Gets a dataset." examples: - name: Datasets_Get text: |- - az datafactory dataset show --name "myDataset" --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" + az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" """ helps['datafactory dataset create'] = """ type: command - short-summary: Creates or updates a dataset. + short-summary: "Creates or updates a dataset." examples: - name: Datasets_Create text: |- az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\ -\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"myLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\ -\\":\\"String\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"TextF\ -ormat\\"},\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\\"typ\ -e\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "myDataset" --factory-name "myFactoryName" \ ---resource-group "myResourceGroup" +\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\\ +"type\\":\\"String\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"\ +TextFormat\\"},\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\ +\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name \ +"exampleFactoryName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory dataset update'] = """ + type: command + short-summary: "Creates or updates a dataset." + parameters: + - name: --folder + short-summary: "The folder that this Dataset is in. If not specified, Dataset will appear at the root level." + long-summary: | + Usage: --folder name=XX + + name: The name of the folder that this Dataset is in. + examples: + - name: Datasets_Update + text: |- + az datafactory dataset update --description "Example description" --linked-service-name \ +"{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters \ +"{\\"MyFileName\\":{\\"type\\":\\"String\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" \ +--factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory dataset delete'] = """ type: command - short-summary: Deletes a dataset. + short-summary: "Deletes a dataset." 
examples: - name: Datasets_Delete text: |- - az datafactory dataset delete --name "myDataset" --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" + az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" """ helps['datafactory pipeline'] = """ @@ -509,78 +536,81 @@ helps['datafactory pipeline list'] = """ type: command - short-summary: Lists pipelines. + short-summary: "Lists pipelines." examples: - name: Pipelines_ListByFactory text: |- - az datafactory pipeline list --factory-name "myFactoryName" --resource-group "myResourceGroup" + az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" """ helps['datafactory pipeline show'] = """ type: command - short-summary: Gets a pipeline. + short-summary: "Gets a pipeline." examples: - name: Pipelines_Get text: |- - az datafactory pipeline show --factory-name "myFactoryName" --name "myPipeline" --resource-group \ -"myResourceGroup" + az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" """ helps['datafactory pipeline create'] = """ type: command - short-summary: Creates or updates a pipeline. + short-summary: "Creates or updates a pipeline." examples: - name: Pipelines_Create text: |- - az datafactory pipeline create --factory-name "myFactoryName" --pipeline "{\\"activities\\":[{\\"name\\"\ -:\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCo\ -pyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\"\ -:\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"myDataset\\"}],\\"outputs\ -\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"\ -@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"myDataset\\"}],\\"typeProperties\\":{\\"d\ -ataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"BlobSource\\"}}}],\\"isSeq\ -uential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}\ -],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}},\\"variabl\ -es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\"JobId\\":{\\"type\\":\\"Expression\\",\ -\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "myPipeline" --resource-group "myResourceGroup" + az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline \ +"{\\"activities\\":[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activiti\ +es\\":[{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"\ +parameters\\":{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\ +\\":\\"exampleDataset\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"typ\ +e\\":\\"Expression\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exam\ +pleDataset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\"\ 
+:{\\"type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipe\ +line().parameters.OutputBlobNameList\\"}}}],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameLi\ +st\\":{\\"type\\":\\"Array\\"}},\\"variables\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":\ +{\\"JobId\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" """ helps['datafactory pipeline update'] = """ type: command - short-summary: Creates or updates a pipeline. + short-summary: "Creates or updates a pipeline." examples: - name: Pipelines_Update text: |- - az datafactory pipeline update --factory-name "myFactoryName" --description "Example description" \ + az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" \ --activities "[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\"\ :[{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"param\ eters\\":{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\\ -"myDataset\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Exp\ -ression\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"myDataset\\"}],\ -\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"\ -BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameter\ -s.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name "myPipeline" \ ---resource-group "myResourceGroup" +"exampleDataset\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\ +\\"Expression\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDa\ +taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\ +type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\ +).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name \ +"examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline delete'] = """ type: command - short-summary: Deletes a pipeline. + short-summary: "Deletes a pipeline." examples: - name: Pipelines_Delete text: |- - az datafactory pipeline delete --factory-name "myFactoryName" --name "myPipeline" --resource-group \ -"myResourceGroup" + az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" """ helps['datafactory pipeline create-run'] = """ type: command - short-summary: Creates a run of a pipeline. + short-summary: "Creates a run of a pipeline." 
examples: - name: Pipelines_CreateRun text: |- - az datafactory pipeline create-run --factory-name "myFactoryName" --parameters \ -"{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}" --name "myPipeline" --resource-group "myResourceGroup" + az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters \ +"{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" """ helps['datafactory pipeline-run'] = """ @@ -590,30 +620,30 @@ helps['datafactory pipeline-run show'] = """ type: command - short-summary: Get a pipeline run by its run ID. + short-summary: "Get a pipeline run by its run ID." examples: - name: PipelineRuns_Get text: |- - az datafactory pipeline-run show --factory-name "myFactoryName" --resource-group "myResourceGroup" \ ---run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" + az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" """ helps['datafactory pipeline-run cancel'] = """ type: command - short-summary: Cancel a pipeline run by its run ID. + short-summary: "Cancel a pipeline run by its run ID." examples: - name: PipelineRuns_Cancel text: |- - az datafactory pipeline-run cancel --factory-name "myFactoryName" --resource-group "myResourceGroup" \ ---run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" + az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" """ helps['datafactory pipeline-run query-by-factory'] = """ type: command - short-summary: Query pipeline runs in the factory based on input filter conditions. + short-summary: "Query pipeline runs in the factory based on input filter conditions." parameters: - name: --filters - short-summary: List of filters. + short-summary: "List of filters." long-summary: | Usage: --filters operand=XX operator=XX values=XX @@ -625,7 +655,7 @@ Multiple actions can be specified by using more than one --filters argument. - name: --order-by - short-summary: List of OrderBy option. + short-summary: "List of OrderBy option." long-summary: | Usage: --order-by order-by=XX order=XX @@ -638,9 +668,9 @@ examples: - name: PipelineRuns_QueryByFactory text: |- - az datafactory pipeline-run query-by-factory --factory-name "myFactoryName" --filters \ -operand="PipelineName" operator="Equals" values="myPipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" \ ---last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "myResourceGroup" + az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters \ +operand="PipelineName" operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" \ +--last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" """ helps['datafactory activity-run'] = """ @@ -650,10 +680,10 @@ helps['datafactory activity-run query-by-pipeline-run'] = """ type: command - short-summary: Query activity runs based on input filter conditions. + short-summary: "Query activity runs based on input filter conditions." parameters: - name: --filters - short-summary: List of filters. + short-summary: "List of filters." long-summary: | Usage: --filters operand=XX operator=XX values=XX @@ -665,7 +695,7 @@ Multiple actions can be specified by using more than one --filters argument. - name: --order-by - short-summary: List of OrderBy option. 
+ short-summary: "List of OrderBy option." long-summary: | Usage: --order-by order-by=XX order=XX @@ -678,9 +708,9 @@ examples: - name: ActivityRuns_QueryByPipelineRun text: |- - az datafactory activity-run query-by-pipeline-run --factory-name "myFactoryName" --last-updated-after \ -"2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "myResourceGroup" \ ---run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" + az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" \ +--last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" \ +--resource-group "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" """ helps['datafactory trigger'] = """ @@ -690,104 +720,115 @@ helps['datafactory trigger list'] = """ type: command - short-summary: Lists triggers. + short-summary: "Lists triggers." examples: - name: Triggers_ListByFactory text: |- - az datafactory trigger list --factory-name "myFactoryName" --resource-group "myResourceGroup" + az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory trigger show'] = """ type: command - short-summary: Gets a trigger. + short-summary: "Gets a trigger." examples: - name: Triggers_Get text: |- - az datafactory trigger show --factory-name "myFactoryName" --resource-group "myResourceGroup" --name \ -"myTrigger" + az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" """ helps['datafactory trigger create'] = """ type: command - short-summary: Creates or updates a trigger. + short-summary: "Creates or updates a trigger." examples: - name: Triggers_Create text: |- - az datafactory trigger create --factory-name "myFactoryName" --resource-group "myResourceGroup" \ ---properties "{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampl\ -eoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"myPipeline\\"}}],\\\ -"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"\ -interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "myTrigger" + az datafactory trigger create --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --properties "{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlo\ +bNameList\\":[\\"exampleoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\"\ +:\\"examplePipeline\\"}}],\\"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"f\ +requency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}\ +}" --name "exampleTrigger" +""" + +helps['datafactory trigger update'] = """ + type: command + short-summary: "Creates or updates a trigger." + examples: + - name: Triggers_Update + text: |- + az datafactory trigger update --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --description "Example description" --name "exampleTrigger" """ helps['datafactory trigger delete'] = """ type: command - short-summary: Deletes a trigger. + short-summary: "Deletes a trigger." 
examples: - name: Triggers_Delete text: |- - az datafactory trigger delete --factory-name "myFactoryName" --resource-group "myResourceGroup" --name \ -"myTrigger" + az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" """ helps['datafactory trigger get-event-subscription-status'] = """ type: command - short-summary: Get a trigger's event subscription status. + short-summary: "Get a trigger's event subscription status." examples: - name: Triggers_GetEventSubscriptionStatus text: |- - az datafactory trigger get-event-subscription-status --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" --name "myTrigger" + az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" --name "exampleTrigger" """ helps['datafactory trigger query-by-factory'] = """ type: command - short-summary: Query triggers. + short-summary: "Query triggers." examples: - name: Triggers_QueryByFactory text: |- - az datafactory trigger query-by-factory --factory-name "myFactoryName" --parent-trigger-name \ -"myTrigger" --resource-group "myResourceGroup" + az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name \ +"exampleTrigger" --resource-group "exampleResourceGroup" """ helps['datafactory trigger start'] = """ type: command - short-summary: Starts a trigger. + short-summary: "Starts a trigger." examples: - name: Triggers_Start text: |- - az datafactory trigger start --factory-name "myFactoryName" --resource-group "myResourceGroup" --name \ -"myTrigger" + az datafactory trigger start --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" """ helps['datafactory trigger stop'] = """ type: command - short-summary: Stops a trigger. + short-summary: "Stops a trigger." examples: - name: Triggers_Stop text: |- - az datafactory trigger stop --factory-name "myFactoryName" --resource-group "myResourceGroup" --name \ -"myTrigger" + az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" """ helps['datafactory trigger subscribe-to-event'] = """ type: command - short-summary: Subscribe event trigger to events. + short-summary: "Subscribe event trigger to events." examples: - name: Triggers_SubscribeToEvents text: |- - az datafactory trigger subscribe-to-event --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" --name "myTrigger" + az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" """ helps['datafactory trigger unsubscribe-from-event'] = """ type: command - short-summary: Unsubscribe event trigger from events. + short-summary: "Unsubscribe event trigger from events." examples: - name: Triggers_UnsubscribeFromEvents text: |- - az datafactory trigger unsubscribe-from-event --factory-name "myFactoryName" --resource-group \ -"myResourceGroup" --name "myTrigger" + az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" """ helps['datafactory trigger wait'] = """ @@ -796,8 +837,8 @@ examples: - name: Pause executing next line of CLI script until the datafactory trigger is successfully created. 
text: |- - az datafactory trigger wait --factory-name "myFactoryName" --resource-group "myResourceGroup" --name \ -"myTrigger" --created + az datafactory trigger wait --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" --created """ helps['datafactory trigger-run'] = """ @@ -805,12 +846,22 @@ short-summary: datafactory trigger-run """ +helps['datafactory trigger-run cancel'] = """ + type: command + short-summary: "Cancel a single trigger instance by runId." + examples: + - name: Triggers_Cancel + text: |- + az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +""" + helps['datafactory trigger-run query-by-factory'] = """ type: command - short-summary: Query trigger runs. + short-summary: "Query trigger runs." parameters: - name: --filters - short-summary: List of filters. + short-summary: "List of filters." long-summary: | Usage: --filters operand=XX operator=XX values=XX @@ -822,7 +873,7 @@ Multiple actions can be specified by using more than one --filters argument. - name: --order-by - short-summary: List of OrderBy option. + short-summary: "List of OrderBy option." long-summary: | Usage: --order-by order-by=XX order=XX @@ -835,17 +886,17 @@ examples: - name: TriggerRuns_QueryByFactory text: |- - az datafactory trigger-run query-by-factory --factory-name "myFactoryName" --filters \ -operand="TriggerName" operator="Equals" values="myTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" \ ---last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "myResourceGroup" + az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters \ +operand="TriggerName" operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" \ +--last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" """ helps['datafactory trigger-run rerun'] = """ type: command - short-summary: Rerun single trigger instance by runId. + short-summary: "Rerun single trigger instance by runId." examples: - name: Triggers_Rerun text: |- - az datafactory trigger-run rerun --factory-name "myFactoryName" --resource-group "myResourceGroup" \ ---run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "myTrigger" + az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 8f610faff04..19e001c560f 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -24,6 +24,7 @@ from azext_datafactory.action import ( AddFactoryVstsConfiguration, AddFactoryGitHubConfiguration, + AddFolder, AddFilters, AddOrderBy ) @@ -36,15 +37,17 @@ def load_arguments(self, _): with self.argument_context('datafactory factory show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.', id_part='name') - c.argument('if_none_match', help='ETag of the factory entity. Should only be specified for get. 
If the ETag ' - 'matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', + id_part='name') + c.argument('if_none_match', type=str, help='ETag of the factory entity. Should only be specified for get. If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') with self.argument_context('datafactory factory create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.') - c.argument('if_match', help='ETag of the factory entity. Should only be specified for update, for which it ' - 'should match existing entity or can be * for unconditional update.') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, + help='The factory name.') + c.argument('if_match', type=str, help='ETag of the factory entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group) c.argument('tags', tags_type) @@ -54,19 +57,23 @@ def load_arguments(self, _): 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') + c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not ' + 'public network access is allowed for the data factory.') with self.argument_context('datafactory factory update') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.', id_part='name') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', + id_part='name') c.argument('tags', tags_type) with self.argument_context('datafactory factory delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.', id_part='name') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', + id_part='name') with self.argument_context('datafactory factory configure-factory-repo') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') - c.argument('factory_resource_id', help='The factory resource id.') + c.argument('factory_resource_id', type=str, help='The factory resource id.') c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS ' 'repo information.', arg_group='RepoConfiguration') c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s ' @@ -74,55 +81,60 @@ def load_arguments(self, _): with self.argument_context('datafactory factory get-data-plane-access') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.', id_part='name') - c.argument('permissions', help='The string with permissions for Data Plane access. 
Currently only \'r\' is ' - 'supported which grants read only access.') - c.argument('access_resource_path', help='The resource path to get access relative to factory. Currently only ' - 'empty string is supported which corresponds to the factory resource.') - c.argument('profile_name', help='The name of the profile. Currently only the default is supported. The default ' - 'value is DefaultProfile.') - c.argument('start_time', help='Start time for the token. If not specified the current time will be used.') - c.argument('expire_time', help='Expiration time for the token. Maximum duration for the token is eight hours ' - 'and by default the token will expire in eight hours.') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', + id_part='name') + c.argument('permissions', type=str, help='The string with permissions for Data Plane access. Currently only ' + '\'r\' is supported which grants read only access.') + c.argument('access_resource_path', type=str, help='The resource path to get access relative to factory. ' + 'Currently only empty string is supported which corresponds to the factory resource.') + c.argument('profile_name', type=str, help='The name of the profile. Currently only the default is supported. ' + 'The default value is DefaultProfile.') + c.argument('start_time', type=str, help='Start time for the token. If not specified the current time will be ' + 'used.') + c.argument('expire_time', type=str, help='Expiration time for the token. Maximum duration for the token is ' + 'eight hours and by default the token will expire in eight hours.') with self.argument_context('datafactory factory get-git-hub-access-token') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n'], help='The factory name.', id_part='name') - c.argument('git_hub_access_code', help='GitHub access code.') - c.argument('git_hub_client_id', help='GitHub application client ID.') - c.argument('git_hub_access_token_base_url', help='GitHub access token base URL.') + c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', + id_part='name') + c.argument('git_hub_access_code', type=str, help='GitHub access code.') + c.argument('git_hub_client_id', type=str, help='GitHub application client ID.') + c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.') with self.argument_context('datafactory integration-runtime list') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') + c.argument('factory_name', type=str, help='The factory name.') with self.argument_context('datafactory integration-runtime show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') - c.argument('if_none_match', help='ETag of the integration runtime entity. Should only be specified for get. 
If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified ' + 'for get. If the ETag matches the existing entity tag, or if * was provided, then no content will ' + 'be returned.') with self.argument_context('datafactory integration-runtime linked-integration-runtime create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('integration_runtime_name', help='The integration runtime name.') - c.argument('name', help='The name of the linked integration runtime.') - c.argument('subscription_id', - help='The ID of the subscription that the linked integration runtime belongs to.') - c.argument('data_factory_name', help='The name of the data factory that the linked integration runtime belongs ' - 'to.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('integration_runtime_name', type=str, help='The integration runtime name.') + c.argument('name', type=str, help='The name of the linked integration runtime.') + c.argument('subscription_id', type=str, help='The ID of the subscription that the linked integration runtime ' + 'belongs to.') + c.argument('data_factory_name', type=str, help='The name of the data factory that the linked integration ' + 'runtime belongs to.') c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group) with self.argument_context('datafactory integration-runtime managed create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.') - c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('description', help='Integration runtime description.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.') + c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' + 'update, for which it should match existing entity or can be * for unconditional update.') + c.argument('description', type=str, help='Integration runtime description.') c.argument('type_properties_compute_properties', type=validate_file_or_dict, help='The compute resource for ' 'managed integration runtime. 
Expected value: json-string/@json-file.') c.argument('type_properties_ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed ' @@ -130,236 +142,289 @@ def load_arguments(self, _): with self.argument_context('datafactory integration-runtime self-hosted create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.') - c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('description', help='Integration runtime description.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.') + c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' + 'update, for which it should match existing entity or can be * for unconditional update.') + c.argument('description', type=str, help='Integration runtime description.') c.argument('type_properties_linked_info', type=validate_file_or_dict, help='The base definition of a linked ' 'integration runtime. Expected value: json-string/@json-file.') with self.argument_context('datafactory integration-runtime update') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') c.argument('auto_update', arg_type=get_enum_type(['On', 'Off']), help='Enables or disables the auto-update ' 'feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.' '') - c.argument('update_delay_offset', help='The time offset (in hours) in the day, e.g., PT03H is 3 hours. The ' - 'integration runtime auto update will happen on that time.') + c.argument('update_delay_offset', type=str, help='The time offset (in hours) in the day, e.g., PT03H is 3 ' + 'hours. 
The integration runtime auto update will happen on that time.') with self.argument_context('datafactory integration-runtime delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime get-connection-info') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime get-monitoring-data') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime get-status') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime list-auth-key') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.') with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], 
type=str, + help='The integration runtime name.', id_part='child_name_1') c.argument('key_name', arg_type=get_enum_type(['authKey1', 'authKey2']), help='The name of the authentication ' 'key to regenerate.') with self.argument_context('datafactory integration-runtime remove-link') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') - c.argument('linked_factory_name', help='The data factory name for linked integration runtime.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') + c.argument('linked_factory_name', type=str, help='The data factory name for linked integration runtime.') with self.argument_context('datafactory integration-runtime start') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime stop') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime sync-credentials') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime upgrade') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') with self.argument_context('datafactory integration-runtime wait') as c: c.argument('resource_group_name', resource_group_name_type) 
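The recurring `id_part='name'` / `id_part='child_name_1'` markers are what let these per-resource commands accept a single `--ids` value in place of `--resource-group`, the factory name and the runtime name. A rough sketch of which segments of an ARM resource ID those id_parts correspond to (hand-rolled for illustration, not the CLI core's parser):

def split_resource_id(resource_id):
    """Rough illustration of the ID segments that id_part values refer to."""
    parts = resource_id.strip('/').split('/')
    segments = dict(zip(parts[::2], parts[1::2]))
    return {
        'resource_group': segments.get('resourceGroups'),
        'name': segments.get('factories'),                    # id_part='name'
        'child_name_1': segments.get('integrationRuntimes'),  # id_part='child_name_1'
    }


example_id = ('/subscriptions/12345678-1234-1234-1234-12345678abc'
              '/resourceGroups/exampleResourceGroup'
              '/providers/Microsoft.DataFactory/factories/exampleFactoryName'
              '/integrationRuntimes/exampleIntegrationRuntime')
assert split_resource_id(example_id)['child_name_1'] == 'exampleIntegrationRuntime'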
- c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', - id_part='child_name_1') - c.argument('if_none_match', help='ETag of the integration runtime entity. Should only be specified for get. If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified ' + 'for get. If the ETag matches the existing entity tag, or if * was provided, then no content will ' + 'be returned.') with self.argument_context('datafactory integration-runtime-node show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', help='The integration runtime node name.', id_part='child_name_2') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') + c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') with self.argument_context('datafactory integration-runtime-node update') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', help='The integration runtime node name.', id_part='child_name_2') - c.argument('concurrent_jobs_limit', help='The number of concurrent jobs permitted to run on the integration ' - 'runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') + c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') + c.argument('concurrent_jobs_limit', type=int, help='The number of concurrent jobs permitted to run on the ' + 'integration runtime node. 
Values between 1 and maxConcurrentJobs(inclusive) are allowed.') with self.argument_context('datafactory integration-runtime-node delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', help='The integration runtime node name.', id_part='child_name_2') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') + c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') with self.argument_context('datafactory integration-runtime-node get-ip-address') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('integration_runtime_name', help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', help='The integration runtime node name.', id_part='child_name_2') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') + c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') with self.argument_context('datafactory linked-service list') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') + c.argument('factory_name', type=str, help='The factory name.') with self.argument_context('datafactory linked-service show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('linked_service_name', options_list=['--name', '-n'], help='The linked service name.', id_part='' - 'child_name_1') - c.argument('if_none_match', help='ETag of the linked service entity. Should only be specified for get. If the ' - 'ETag matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' + 'linked service name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the linked service entity. Should only be specified for ' + 'get. If the ETag matches the existing entity tag, or if * was provided, then no content will be ' + 'returned.') with self.argument_context('datafactory linked-service create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('linked_service_name', options_list=['--name', '-n'], help='The linked service name.') - c.argument('if_match', help='ETag of the linkedService entity. Should only be specified for update, for which ' - 'it should match existing entity or can be * for unconditional update.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' + 'linked service name.') + c.argument('if_match', type=str, help='ETag of the linkedService entity. 
Should only be specified for update, ' + 'for which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Properties of linked service. Expected value: ' 'json-string/@json-file.') + with self.argument_context('datafactory linked-service update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' + 'linked service name.', id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the linkedService entity. Should only be specified for update, ' + 'for which it should match existing entity or can be * for unconditional update.') + c.argument('connect_via', type=validate_file_or_dict, help='The integration runtime reference. Expected value: ' + 'json-string/@json-file.') + c.argument('description', type=str, help='Linked service description.') + c.argument('parameters', type=validate_file_or_dict, help='Parameters for linked service. Expected value: ' + 'json-string/@json-file.') + c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' + 'linked service. Expected value: json-string/@json-file.') + c.ignore('properties') + with self.argument_context('datafactory linked-service delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('linked_service_name', options_list=['--name', '-n'], help='The linked service name.', id_part='' - 'child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' + 'linked service name.', id_part='child_name_1') with self.argument_context('datafactory dataset list') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') + c.argument('factory_name', type=str, help='The factory name.') with self.argument_context('datafactory dataset show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('dataset_name', options_list=['--name', '-n'], help='The dataset name.', id_part='child_name_1') - c.argument('if_none_match', help='ETag of the dataset entity. Should only be specified for get. If the ETag ' - 'matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', + id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the dataset entity. Should only be specified for get. If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') with self.argument_context('datafactory dataset create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('dataset_name', options_list=['--name', '-n'], help='The dataset name.') - c.argument('if_match', help='ETag of the dataset entity. 
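`--properties` here (and the JSON-valued options on the new update commands) is typed as `validate_file_or_dict`, meaning the value may be an inline JSON string or `@path/to/file.json`. An approximation of that behaviour for readers unfamiliar with the helper (the real implementation lives in azure.cli.core.util; this is only a sketch):

import json
import os


def file_or_dict(value):
    """Approximate validate_file_or_dict: load '@file' contents, else parse inline JSON."""
    if value.startswith('@'):
        with open(os.path.expanduser(value[1:])) as handle:
            return json.load(handle)
    return json.loads(value)


# Inline JSON and @file are interchangeable, e.g. for --properties:
props = file_or_dict('{"type": "AzureStorage", "typeProperties": {}}')
assert props['type'] == 'AzureStorage'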
Should only be specified for update, for which it ' - 'should match existing entity or can be * for unconditional update.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, + help='The dataset name.') + c.argument('if_match', type=str, help='ETag of the dataset entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Dataset properties. Expected value: ' 'json-string/@json-file.') + with self.argument_context('datafactory dataset update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', + id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the dataset entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') + c.argument('description', type=str, help='Dataset description.') + c.argument('structure', type=validate_file_or_dict, help='Columns that define the structure of the dataset. ' + 'Type: array (or Expression with resultType array), itemType: DatasetDataElement. Expected value: ' + 'json-string/@json-file.') + c.argument('schema', type=validate_file_or_dict, help='Columns that define the physical type schema of the ' + 'dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. ' + 'Expected value: json-string/@json-file.') + c.argument('linked_service_name', type=validate_file_or_dict, help='Linked service reference. Expected value: ' + 'json-string/@json-file.') + c.argument('parameters', type=validate_file_or_dict, help='Parameters for dataset. Expected value: ' + 'json-string/@json-file.') + c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' + 'Dataset. Expected value: json-string/@json-file.') + c.argument('folder', action=AddFolder, nargs='*', help='The folder that this Dataset is in. If not specified, ' + 'Dataset will appear at the root level.') + c.ignore('properties') + with self.argument_context('datafactory dataset delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('dataset_name', options_list=['--name', '-n'], help='The dataset name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', + id_part='child_name_1') with self.argument_context('datafactory pipeline list') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') + c.argument('factory_name', type=str, help='The factory name.') with self.argument_context('datafactory pipeline show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n'], help='The pipeline name.', id_part='child_name_1') - c.argument('if_none_match', help='ETag of the pipeline entity. Should only be specified for get. 
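`--folder` on `dataset update` is wired to the new `AddFolder` action defined later in action.py: space-separated KEY=VALUE tokens are collected into a dict and only the `name` key is kept. A standalone sketch of that parsing, outside the argparse plumbing:

from collections import defaultdict


def parse_folder(tokens):
    """Mirror the AddFolder logic: KEY=VALUE tokens -> {'name': ...}."""
    properties = defaultdict(list)
    for key, value in (token.split('=', 1) for token in tokens):
        properties[key].append(value)
    folder = {}
    for key, values in properties.items():
        if key.lower() == 'name':
            folder['name'] = values[0]
    return folder


# e.g. `--folder name=exampleFolder` on the command line:
assert parse_folder(['name=exampleFolder']) == {'name': 'exampleFolder'}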
If the ETag ' - 'matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' + 'name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the pipeline entity. Should only be specified for get. If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') with self.argument_context('datafactory pipeline create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('pipeline_name', options_list=['--name', '-n'], help='The pipeline name.') - c.argument('if_match', help='ETag of the pipeline entity. Should only be specified for update, for which it ' - 'should match existing entity or can be * for unconditional update.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' + 'name.') + c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') c.argument('pipeline', type=validate_file_or_dict, help='Pipeline resource definition. Expected value: ' 'json-string/@json-file.') with self.argument_context('datafactory pipeline update') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n'], help='The pipeline name.', id_part='child_name_1') - c.argument('if_match', help='ETag of the pipeline entity. Should only be specified for update, for which it ' - 'should match existing entity or can be * for unconditional update.') - c.argument('description', help='The description of the pipeline.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' + 'name.', id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') + c.argument('description', type=str, help='The description of the pipeline.') c.argument('activities', type=validate_file_or_dict, help='List of activities in pipeline. Expected value: ' 'json-string/@json-file.') c.argument('parameters', type=validate_file_or_dict, help='List of parameters for pipeline. Expected value: ' 'json-string/@json-file.') c.argument('variables', type=validate_file_or_dict, help='List of variables for pipeline. Expected value: ' 'json-string/@json-file.') - c.argument('concurrency', help='The max number of concurrent runs for the pipeline.') + c.argument('concurrency', type=int, help='The max number of concurrent runs for the pipeline.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'Pipeline. Expected value: json-string/@json-file.') c.argument('run_dimensions', type=validate_file_or_dict, help='Dimensions emitted by Pipeline. 
Expected value: ' 'json-string/@json-file.') - c.argument('folder_name', help='The name of the folder that this Pipeline is in.') + c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.') c.ignore('pipeline') with self.argument_context('datafactory pipeline delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n'], help='The pipeline name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' + 'name.', id_part='child_name_1') with self.argument_context('datafactory pipeline create-run') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('pipeline_name', options_list=['--name', '-n'], help='The pipeline name.') - c.argument('reference_pipeline_run_id', help='The pipeline run identifier. If run ID is specified the ' - 'parameters of the specified run will be used to create a new run.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' + 'name.') + c.argument('reference_pipeline_run_id', type=str, help='The pipeline run identifier. If run ID is specified ' + 'the parameters of the specified run will be used to create a new run.') c.argument('is_recovery', arg_type=get_three_state_flag(), help='Recovery mode flag. If recovery mode is set ' 'to true, the specified referenced pipeline run and the new run will be grouped under the same ' 'groupId.') - c.argument('start_activity_name', help='In recovery mode, the rerun will start from this activity. If not ' - 'specified, all activities will run.') + c.argument('start_activity_name', type=str, help='In recovery mode, the rerun will start from this activity. ' + 'If not specified, all activities will run.') c.argument('start_from_failure', arg_type=get_three_state_flag(), help='In recovery mode, if set to true, the ' 'rerun will start from failed activities. 
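`--is-recovery` and `--start-from-failure` use `get_three_state_flag`, so they take explicit truthy/falsy values rather than acting as bare switches. Roughly (an approximation, not the CLI core implementation):

def three_state(value):
    """Approximate three-state flag parsing: explicit true/false style values."""
    lowered = str(value).lower()
    if lowered in ('true', 'yes', '1'):
        return True
    if lowered in ('false', 'no', '0'):
        return False
    raise ValueError('allowed values: true, false, yes, no, 1, 0')


assert three_state('true') is True
assert three_state('No') is False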
The property will be used only if startActivityName is ' 'not specified.') @@ -368,21 +433,21 @@ def load_arguments(self, _): with self.argument_context('datafactory pipeline-run show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('run_id', help='The pipeline run identifier.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') with self.argument_context('datafactory pipeline-run cancel') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('run_id', help='The pipeline run identifier.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') c.argument('is_recursive', arg_type=get_three_state_flag(), help='If true, cancel all the Child pipelines that ' 'are triggered by the current pipeline.') with self.argument_context('datafactory pipeline-run query-by-factory') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('continuation_token', help='The continuation token for getting the next page of results. Null for ' - 'first page.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. ' + 'Null for first page.') c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' @@ -392,10 +457,10 @@ def load_arguments(self, _): with self.argument_context('datafactory activity-run query-by-pipeline-run') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('run_id', help='The pipeline run identifier.', id_part='child_name_1') - c.argument('continuation_token', help='The continuation token for getting the next page of results. Null for ' - 'first page.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') + c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. 
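The `--continuation-token` options page through run-query results: each response may carry a token that, passed back in, fetches the next page (the custom layer below forwards it to the SDK as `continuation_token_parameter`). A hedged sketch of the pagination loop a caller could build on top of this, with response attribute names assumed rather than taken from the SDK:

def iter_pipeline_runs(client, resource_group_name, factory_name,
                       last_updated_after, last_updated_before, filters=None):
    """Assumed pagination pattern; response attribute names are illustrative."""
    token = None
    while True:
        page = client.query_by_factory(resource_group_name=resource_group_name,
                                       factory_name=factory_name,
                                       continuation_token_parameter=token,
                                       last_updated_after=last_updated_after,
                                       last_updated_before=last_updated_before,
                                       filters=filters)
        for run in page.value:
            yield run
        token = getattr(page, 'continuation_token', None)
        if not token:
            break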
' + 'Null for first page.') c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' @@ -405,74 +470,101 @@ def load_arguments(self, _): with self.argument_context('datafactory trigger list') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') + c.argument('factory_name', type=str, help='The factory name.') with self.argument_context('datafactory trigger show') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') - c.argument('if_none_match', help='ETag of the trigger entity. Should only be specified for get. If the ETag ' - 'matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') with self.argument_context('datafactory trigger create') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.') - c.argument('if_match', help='ETag of the trigger entity. Should only be specified for update, for which it ' - 'should match existing entity or can be * for unconditional update.') + c.argument('factory_name', type=str, help='The factory name.') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, + help='The trigger name.') + c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Properties of the trigger. Expected value: ' 'json-string/@json-file.') + with self.argument_context('datafactory trigger update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for ' + 'which it should match existing entity or can be * for unconditional update.') + c.argument('description', type=str, help='Trigger description.') + c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' + 'trigger. 
Expected value: json-string/@json-file.') + c.ignore('properties') + with self.argument_context('datafactory trigger delete') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger get-event-subscription-status') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger query-by-factory') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('continuation_token', help='The continuation token for getting the next page of results. Null for ' - 'first page.') - c.argument('parent_trigger_name', help='The name of the parent TumblingWindowTrigger to get the child rerun ' - 'triggers') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. 
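The `c.ignore('properties')` lines above pair with the `generic_update_command('update', setter_arg_name='properties', custom_func_name=...)` registrations added in commands.py below: the CLI first GETs the resource, hands the result to the custom update function, and then PUTs back whatever that function returns as the setter's `properties` argument. A schematic of that read-modify-write cycle (simplified, not the CLI core implementation):

def generic_update(getter, setter, custom_func, setter_arg_name, **custom_kwargs):
    """Schematic GET -> custom_func -> SET cycle behind generic_update_command."""
    instance = getter()                                # e.g. triggers.get(...)
    patched = custom_func(instance, **custom_kwargs)   # e.g. datafactory_trigger_update(...)
    return setter(**{setter_arg_name: patched})        # e.g. create_or_update(properties=patched)

This is also why the update custom functions added in custom.py further down return `instance.properties` instead of calling the client themselves.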
' + 'Null for first page.') + c.argument('parent_trigger_name', type=str, help='The name of the parent TumblingWindowTrigger to get the ' + 'child rerun triggers') with self.argument_context('datafactory trigger start') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger stop') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger subscribe-to-event') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger unsubscribe-from-event') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') with self.argument_context('datafactory trigger wait') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') - c.argument('if_none_match', help='ETag of the trigger entity. Should only be specified for get. If the ETag ' - 'matches the existing entity tag, or if * was provided, then no content will be returned.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', + id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. 
If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') + + with self.argument_context('datafactory trigger-run cancel') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') + c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') with self.argument_context('datafactory trigger-run query-by-factory') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('continuation_token', help='The continuation token for getting the next page of results. Null for ' - 'first page.') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. ' + 'Null for first page.') c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' @@ -482,6 +574,6 @@ def load_arguments(self, _): with self.argument_context('datafactory trigger-run rerun') as c: c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', help='The factory name.', id_part='name') - c.argument('trigger_name', help='The trigger name.', id_part='child_name_1') - c.argument('run_id', help='The pipeline run identifier.', id_part='child_name_2') + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') + c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index fb3077a19fa..ec9616c8672 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -10,8 +10,8 @@ # pylint: disable=protected-access import argparse -from knack.util import CLIError from collections import defaultdict +from knack.util import CLIError class AddFactoryVstsConfiguration(argparse.Action): @@ -82,6 +82,28 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use return d +class AddFolder(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.folder = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + return d + + class AddFilters(argparse._AppendAction): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index 55d431ac0a9..cfc2a3de83b 100644 --- 
a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -26,7 +26,7 @@ def load_command_table(self, _): g.custom_show_command('show', 'datafactory_factory_show') g.custom_command('create', 'datafactory_factory_create') g.custom_command('update', 'datafactory_factory_update') - g.custom_command('delete', 'datafactory_factory_delete') + g.custom_command('delete', 'datafactory_factory_delete', confirmation=True) g.custom_command('configure-factory-repo', 'datafactory_factory_configure_factory_repo') g.custom_command('get-data-plane-access', 'datafactory_factory_get_data_plane_access') g.custom_command('get-git-hub-access-token', 'datafactory_factory_get_git_hub_access_token') @@ -45,7 +45,7 @@ def load_command_table(self, _): g.custom_command('managed create', 'datafactory_integration_runtime_managed_create') g.custom_command('self-hosted create', 'datafactory_integration_runtime_self_hosted_create') g.custom_command('update', 'datafactory_integration_runtime_update') - g.custom_command('delete', 'datafactory_integration_runtime_delete') + g.custom_command('delete', 'datafactory_integration_runtime_delete', confirmation=True) g.custom_command('get-connection-info', 'datafactory_integration_runtime_get_connection_info') g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data') g.custom_command('get-status', 'datafactory_integration_runtime_get_status') @@ -67,7 +67,7 @@ def load_command_table(self, _): client_factory=cf_integration_runtime_node, is_experimental=True) as g: g.custom_show_command('show', 'datafactory_integration_runtime_node_show') g.custom_command('update', 'datafactory_integration_runtime_node_update') - g.custom_command('delete', 'datafactory_integration_runtime_node_delete') + g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True) g.custom_command('get-ip-address', 'datafactory_integration_runtime_node_get_ip_address') from azext_datafactory.generated._client_factory import cf_linked_service @@ -80,7 +80,9 @@ def load_command_table(self, _): g.custom_command('list', 'datafactory_linked_service_list') g.custom_show_command('show', 'datafactory_linked_service_show') g.custom_command('create', 'datafactory_linked_service_create') - g.custom_command('delete', 'datafactory_linked_service_delete') + g.generic_update_command('update', setter_arg_name='properties', custom_func_name='' + 'datafactory_linked_service_update') + g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_dataset datafactory_dataset = CliCommandType( @@ -92,7 +94,9 @@ def load_command_table(self, _): g.custom_command('list', 'datafactory_dataset_list') g.custom_show_command('show', 'datafactory_dataset_show') g.custom_command('create', 'datafactory_dataset_create') - g.custom_command('delete', 'datafactory_dataset_delete') + g.generic_update_command('update', setter_arg_name='properties', + custom_func_name='datafactory_dataset_update') + g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_pipeline datafactory_pipeline = CliCommandType( @@ -105,7 +109,7 @@ def load_command_table(self, _): g.custom_show_command('show', 'datafactory_pipeline_show') g.custom_command('create', 'datafactory_pipeline_create') g.generic_update_command('update', setter_arg_name='pipeline', 
custom_func_name='datafactory_pipeline_update') - g.custom_command('delete', 'datafactory_pipeline_delete') + g.custom_command('delete', 'datafactory_pipeline_delete', confirmation=True) g.custom_command('create-run', 'datafactory_pipeline_create_run') from azext_datafactory.generated._client_factory import cf_pipeline_run @@ -138,7 +142,9 @@ def load_command_table(self, _): g.custom_command('list', 'datafactory_trigger_list') g.custom_show_command('show', 'datafactory_trigger_show') g.custom_command('create', 'datafactory_trigger_create') - g.custom_command('delete', 'datafactory_trigger_delete') + g.generic_update_command('update', setter_arg_name='properties', + custom_func_name='datafactory_trigger_update') + g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True) g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') g.custom_command('start', 'datafactory_trigger_start', supports_no_wait=True) @@ -155,5 +161,6 @@ def load_command_table(self, _): client_factory=cf_trigger_run) with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run, is_experimental=True) as g: + g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') g.custom_command('rerun', 'datafactory_trigger_run_rerun') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index 378bfa4363f..de15a604498 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -10,6 +10,7 @@ # pylint: disable=too-many-lines # pylint: disable=unused-argument +import json from knack.util import CLIError from azure.cli.core.util import sdk_no_wait @@ -38,7 +39,8 @@ def datafactory_factory_create(client, tags=None, factory_vsts_configuration=None, factory_git_hub_configuration=None, - global_parameters=None): + global_parameters=None, + public_network_access=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -53,8 +55,10 @@ def datafactory_factory_create(client, if_match=if_match, location=location, tags=tags, - identity={"type": "SystemAssigned"}, - repo_configuration=repo_configuration) + identity=None, + repo_configuration=repo_configuration, + global_parameters=global_parameters, + public_network_access=public_network_access) def datafactory_factory_update(client, @@ -64,7 +68,7 @@ def datafactory_factory_update(client, return client.update(resource_group_name=resource_group_name, factory_name=factory_name, tags=tags, - identity={"type": "SystemAssigned"}) + identity=json.loads("{\"type\": \"SystemAssigned\"}")) def datafactory_factory_delete(client, @@ -395,6 +399,26 @@ def datafactory_linked_service_create(client, properties=properties) +def datafactory_linked_service_update(instance, + resource_group_name, + factory_name, + linked_service_name, + if_match=None, + connect_via=None, + description=None, + parameters=None, + annotations=None): + if connect_via is not None: + instance.properties.connect_via = connect_via + if description is not None: + instance.properties.description = description + if parameters is not None: + instance.properties.parameters = parameters + if annotations is not None: + instance.properties.annotations = annotations + 
return instance.properties + + def datafactory_linked_service_delete(client, resource_group_name, factory_name, @@ -435,6 +459,35 @@ def datafactory_dataset_create(client, properties=properties) +def datafactory_dataset_update(instance, + resource_group_name, + factory_name, + dataset_name, + linked_service_name, + if_match=None, + description=None, + structure=None, + schema=None, + parameters=None, + annotations=None, + folder=None): + if description is not None: + instance.properties.description = description + if structure is not None: + instance.properties.structure = structure + if schema is not None: + instance.properties.schema = schema + if linked_service_name is not None: + instance.properties.linked_service_name = linked_service_name + if parameters is not None: + instance.properties.parameters = parameters + if annotations is not None: + instance.properties.annotations = annotations + if folder is not None: + instance.properties.folder = folder + return instance.properties + + def datafactory_dataset_delete(client, resource_group_name, factory_name, @@ -503,7 +556,7 @@ def datafactory_pipeline_update(instance, if run_dimensions is not None: instance.run_dimensions = run_dimensions if folder_name is not None: - instance.name_properties_folder_name = folder_name + instance.name_folder_name = folder_name return instance @@ -565,7 +618,7 @@ def datafactory_pipeline_run_query_by_factory(client, order_by=None): return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token=continuation_token, + continuation_token_parameter=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, @@ -584,7 +637,7 @@ def datafactory_activity_run_query_by_pipeline_run(client, return client.query_by_pipeline_run(resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, - continuation_token=continuation_token, + continuation_token_parameter=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, @@ -622,6 +675,20 @@ def datafactory_trigger_create(client, properties=properties) +def datafactory_trigger_update(instance, + resource_group_name, + factory_name, + trigger_name, + if_match=None, + description=None, + annotations=None): + if description is not None: + instance.properties.description = description + if annotations is not None: + instance.properties.annotations = annotations + return instance.properties + + def datafactory_trigger_delete(client, resource_group_name, factory_name, @@ -647,7 +714,7 @@ def datafactory_trigger_query_by_factory(client, parent_trigger_name=None): return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token=continuation_token, + continuation_token_parameter=continuation_token, parent_trigger_name=parent_trigger_name) @@ -699,6 +766,17 @@ def datafactory_trigger_unsubscribe_from_event(client, trigger_name=trigger_name) +def datafactory_trigger_run_cancel(client, + resource_group_name, + factory_name, + trigger_name, + run_id): + return client.cancel(resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id) + + def datafactory_trigger_run_query_by_factory(client, resource_group_name, factory_name, @@ -709,7 +787,7 @@ def datafactory_trigger_run_query_by_factory(client, order_by=None): return client.query_by_factory(resource_group_name=resource_group_name, 
factory_name=factory_name, - continuation_token=continuation_token, + continuation_token_parameter=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, diff --git a/src/datafactory/azext_datafactory/tests/__init__.py b/src/datafactory/azext_datafactory/tests/__init__.py index 9b7ec942a0b..50e0627daff 100644 --- a/src/datafactory/azext_datafactory/tests/__init__.py +++ b/src/datafactory/azext_datafactory/tests/__init__.py @@ -9,15 +9,23 @@ # regenerated. # -------------------------------------------------------------------------- import inspect +import logging import os import sys import traceback +import datetime as dt + from azure.core.exceptions import AzureError from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError +logger = logging.getLogger('azure.cli.testsdk') +logger.addHandler(logging.StreamHandler()) __path__ = __import__('pkgutil').extend_path(__path__, __name__) exceptions = [] +test_map = dict() +SUCCESSED = "successed" +FAILED = "failed" def try_manual(func): @@ -39,29 +47,64 @@ def get_func_to_call(): func_to_call = func try: func_to_call = import_manual_function(func) - print("Found manual override for {}(...)".format(func.__name__)) + func_to_call = import_manual_function(func) + logger.info("Found manual override for %s(...)", func.__name__) except (ImportError, AttributeError): pass return func_to_call def wrapper(*args, **kwargs): func_to_call = get_func_to_call() - print("running {}()...".format(func.__name__)) + logger.info("running %s()...", func.__name__) try: - return func_to_call(*args, **kwargs) - except (AssertionError, AzureError, CliTestError, CliExecutionError, JMESPathCheckAssertionError) as e: - print("--------------------------------------") - print("step exception: ", e) - print("--------------------------------------", file=sys.stderr) - print("step exception in {}: {}".format(func.__name__, e), file=sys.stderr) - traceback.print_exc() + test_map[func.__name__] = dict() + test_map[func.__name__]["result"] = SUCCESSED + test_map[func.__name__]["error_message"] = "" + test_map[func.__name__]["error_stack"] = "" + test_map[func.__name__]["error_normalized"] = "" + test_map[func.__name__]["start_dt"] = dt.datetime.utcnow() + ret = func_to_call(*args, **kwargs) + except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, + JMESPathCheckAssertionError) as e: + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + test_map[func.__name__]["result"] = FAILED + test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] + test_map[func.__name__]["error_stack"] = traceback.format_exc().replace( + "\r\n", " ").replace("\n", " ")[:500] + logger.info("--------------------------------------") + logger.info("step exception: %s", e) + logger.error("--------------------------------------") + logger.error("step exception in %s: %s", func.__name__, e) + logger.info(traceback.format_exc()) exceptions.append((func.__name__, sys.exc_info())) + else: + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + return ret if inspect.isclass(func): return get_func_to_call() return wrapper +def calc_coverage(filename): + filename = filename.split(".")[0] + coverage_name = filename + "_coverage.md" + with open(coverage_name, "w") as f: + f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n") + total = len(test_map) + covered = 0 + for k, v in test_map.items(): + if not 
k.startswith("step_"): + total -= 1 + continue + if v["result"] == SUCCESSED: + covered += 1 + f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" + "{end_dt}|\n".format(step_name=k, **v)) + f.write("Coverage: {}/{}\n".format(covered, total)) + print("Create coverage\n", file=sys.stderr) + + def raise_if(): if exceptions: if len(exceptions) <= 1: diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 38c16c655b1..f3cef88ea30 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -10,13 +10,14 @@ import os from azure.cli.testsdk import ScenarioTest -from .. import try_manual, raise_if +from .. import try_manual, raise_if, calc_coverage from azure.cli.testsdk import ResourceGroupPreparer TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) +# Env setup @try_manual def setup(test, rg): pass @@ -27,49 +28,43 @@ def setup(test, rg): def step_factories_createorupdate(test, rg): test.cmd('az datafactory factory create ' '--location "East US" ' - '--name "{myFactoryName}" ' + '--name "{myFactory}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myFactoryName}"), - test.check('provisioningState', 'Succeeded') - ]) + checks=[]) # EXAMPLE: Factories_Update @try_manual def step_factories_update(test, rg): test.cmd('az datafactory factory update ' - '--name "{myFactoryName}" ' + '--name "{myFactory}" ' '--tags exampleTag="exampleValue" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myFactoryName}"), - test.check('provisioningState', 'Succeeded'), - test.check('tags.exampleTag', 'exampleValue') - ]) + checks=[]) # EXAMPLE: LinkedServices_Create @try_manual def step_linkedservices_create(test, rg): test.cmd('az datafactory linked-service create ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' 'tKey=\\"}}}}}}" ' '--name "{myLinkedService}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myLinkedService}"), - test.check('properties.type', 'AzureStorage') - ]) + checks=[]) # EXAMPLE: LinkedServices_Update @try_manual def step_linkedservices_update(test, rg): - # EXAMPLE NOT FOUND! 
- pass + test.cmd('az datafactory linked-service update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=[]) # EXAMPLE: Datasets_Create @@ -77,55 +72,69 @@ def step_linkedservices_update(test, rg): def step_datasets_create(test, rg): test.cmd('az datafactory dataset create ' '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' - '\\",\\"referenceName\\":\\"myLinkedService\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Strin' - 'g\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\":' - '\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"}}' - ',\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' + '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' + 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' + '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' + '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' '--name "{myDataset}" ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myDataset}") - ]) + checks=[]) # EXAMPLE: Datasets_Update @try_manual def step_datasets_update(test, rg): - # EXAMPLE NOT FOUND! - pass + test.cmd('az datafactory dataset update ' + '--description "Example description" ' + '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' + '\\"}}" ' + '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' + '}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=[]) # EXAMPLE: Pipelines_Create @try_manual def step_pipelines_create(test, rg): test.cmd('az datafactory pipeline create ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' - '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"myDataset\\"}}],\\"outputs\\":[{{\\"ty' - 'pe\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\' - '":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"myDataset\\"}}],\\"t' - 'ypeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{' - '\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"v' - 'alue\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"type\\' - '":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVariableA' - 'rray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",\\"v' - 'alue\\":\\"@pipeline().parameters.JobId\\"}}}}}}" ' + 
'\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' + 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' + '\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' + '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' + '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' + 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' + 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}}}}" ' '--name "{myPipeline}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myPipeline}") - ]) + checks=[]) # EXAMPLE: Pipelines_Update @try_manual def step_pipelines_update(test, rg): test.cmd('az datafactory pipeline update ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--description "Example description" ' + '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' + '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' + ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' + ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' + 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' + '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' + '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' + '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' + 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' + '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' '--name "{myPipeline}" ' '--resource-group "{rg}"', checks=[]) @@ -135,31 +144,33 @@ def step_pipelines_update(test, rg): @try_manual def step_triggers_create(test, rg): test.cmd('az datafactory trigger create ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' - 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"{myEndTime}' - '\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"{myStartTime}\\",\\"timeZone\\":' - '\\"UTC\\"}}}}}}" ' + 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' + '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' + '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' '--name "{myTrigger}"', - checks=[ - test.check('name', "{myTrigger}") - ]) + checks=[]) # EXAMPLE: Triggers_Update @try_manual def step_triggers_update(test, rg): - # EXAMPLE NOT FOUND! 
- pass + test.cmd('az datafactory trigger update ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--description "Example description" ' + '--name "{myTrigger}"', + checks=[]) # EXAMPLE: IntegrationRuntimes_Create @try_manual def step_integrationruntimes_create(test, rg): - test.cmd('az datafactory integration-runtime managed create ' - '--factory-name "{myFactoryName}" ' + test.cmd('az datafactory integration-runtime self-hosted create ' + '--factory-name "{myFactory}" ' '--description "A selfhosted integration runtime" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', @@ -170,7 +181,7 @@ def step_integrationruntimes_create(test, rg): @try_manual def step_integrationruntimes_update(test, rg): test.cmd('az datafactory integration-runtime update ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}" ' '--auto-update "Off" ' @@ -180,13 +191,13 @@ def step_integrationruntimes_update(test, rg): # EXAMPLE: IntegrationRuntimes_CreateLinkedIntegrationRuntime @try_manual -def step_integrationruntimes_createlinkedintegrationruntime(test, rg): +def step_integrationruntimes_createlinkedintegrationru(test, rg): test.cmd('az datafactory integration-runtime linked-integration-runtime create ' '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' '--location "West US" ' '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--integration-runtime-name "{myIntegrationRuntime}" ' '--resource-group "{rg}" ' '--subscription-id "12345678-1234-1234-1234-12345678abc"', @@ -197,7 +208,7 @@ def step_integrationruntimes_createlinkedintegrationruntime(test, rg): @try_manual def step_pipelines_createrun(test, rg): test.cmd('az datafactory pipeline create-run ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' '--name "{myPipeline}" ' '--resource-group "{rg}"', @@ -208,12 +219,10 @@ def step_pipelines_createrun(test, rg): @try_manual def step_integrationruntimes_get(test, rg): test.cmd('az datafactory integration-runtime show ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myIntegrationRuntime}") - ]) + checks=[]) # EXAMPLE: RerunTriggers_ListByTrigger @@ -227,36 +236,30 @@ def step_reruntriggers_listbytrigger(test, rg): @try_manual def step_linkedservices_get(test, rg): test.cmd('az datafactory linked-service show ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myLinkedService}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myLinkedService}") - ]) + checks=[]) # EXAMPLE: PipelineRuns_Get @try_manual def step_pipelineruns_get(test, rg): test.cmd('az datafactory pipeline-run show ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' - '--run-id "{myRunId}"', - checks=[ - test.check('runId', "{myRunId}") - ]) + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=[]) # EXAMPLE: Pipelines_Get @try_manual def step_pipelines_get(test, rg): test.cmd('az datafactory pipeline show ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myPipeline}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myPipeline}") - ]) + 
checks=[]) # EXAMPLE: Datasets_Get @@ -264,7 +267,7 @@ def step_pipelines_get(test, rg): def step_datasets_get(test, rg): test.cmd('az datafactory dataset show ' '--name "{myDataset}" ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -273,7 +276,7 @@ def step_datasets_get(test, rg): @try_manual def step_triggers_get(test, rg): test.cmd('az datafactory trigger show ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -283,7 +286,7 @@ def step_triggers_get(test, rg): @try_manual def step_integrationruntimes_listbyfactory(test, rg): test.cmd('az datafactory integration-runtime list ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -292,7 +295,7 @@ def step_integrationruntimes_listbyfactory(test, rg): @try_manual def step_linkedservices_listbyfactory(test, rg): test.cmd('az datafactory linked-service list ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -301,7 +304,7 @@ def step_linkedservices_listbyfactory(test, rg): @try_manual def step_pipelines_listbyfactory(test, rg): test.cmd('az datafactory pipeline list ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -310,7 +313,7 @@ def step_pipelines_listbyfactory(test, rg): @try_manual def step_triggers_listbyfactory(test, rg): test.cmd('az datafactory trigger list ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -319,7 +322,7 @@ def step_triggers_listbyfactory(test, rg): @try_manual def step_datasets_listbyfactory(test, rg): test.cmd('az datafactory dataset list ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -328,7 +331,7 @@ def step_datasets_listbyfactory(test, rg): @try_manual def step_factories_get(test, rg): test.cmd('az datafactory factory show ' - '--name "{myFactoryName}" ' + '--name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -381,7 +384,7 @@ def step_reruntriggers_stop(test, rg): @try_manual def step_integrationruntimes_regenerateauthkey(test, rg): test.cmd('az datafactory integration-runtime regenerate-auth-key ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--key-name "authKey2" ' '--resource-group "{rg}"', @@ -399,7 +402,7 @@ def step_triggerruns_rerun(test, rg): @try_manual def step_integrationruntimes_getconnectioninfo(test, rg): test.cmd('az datafactory integration-runtime get-connection-info ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', checks=[]) @@ -409,7 +412,7 @@ def step_integrationruntimes_getconnectioninfo(test, rg): @try_manual def step_integrationruntimes_synccredentials(test, rg): test.cmd('az datafactory integration-runtime sync-credentials ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', checks=[]) @@ -419,19 +422,17 @@ def step_integrationruntimes_synccredentials(test, rg): @try_manual def step_integrationruntimes_getmonitoringdata(test, rg): test.cmd('az datafactory integration-runtime get-monitoring-data ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name 
"{myIntegrationRuntime}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myIntegrationRuntime}") - ]) + checks=[]) # EXAMPLE: IntegrationRuntimes_ListAuthKeys @try_manual def step_integrationruntimes_listauthkeys(test, rg): test.cmd('az datafactory integration-runtime list-auth-key ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', checks=[]) @@ -441,9 +442,9 @@ def step_integrationruntimes_listauthkeys(test, rg): @try_manual def step_integrationruntimes_upgrade(test, rg): test.cmd('az datafactory integration-runtime remove-link ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' - '--linked-factory-name "myFactoryName-linked" ' + '--linked-factory-name "exampleFactoryName-linked" ' '--resource-group "{rg}"', checks=[]) @@ -452,21 +453,18 @@ def step_integrationruntimes_upgrade(test, rg): @try_manual def step_integrationruntimes_getstatus(test, rg): test.cmd('az datafactory integration-runtime get-status ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', - checks=[ - test.check('name', "{myIntegrationRuntime}"), - test.check('properties.dataFactoryName', "{myFactoryName}") - ]) + checks=[]) # EXAMPLE: IntegrationRuntimes_Start @try_manual def step_integrationruntimes_start(test, rg): test.cmd('az datafactory integration-runtime start ' - '--factory-name "{myFactoryName}" ' - '--name "{myIntegrationRuntime}" ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' '--resource-group "{rg}"', checks=[]) @@ -475,8 +473,8 @@ def step_integrationruntimes_start(test, rg): @try_manual def step_integrationruntimes_stop(test, rg): test.cmd('az datafactory integration-runtime stop ' - '--factory-name "{myFactoryName}" ' - '--name "{myIntegrationRuntime}" ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' '--resource-group "{rg}"', checks=[]) @@ -485,7 +483,7 @@ def step_integrationruntimes_stop(test, rg): @try_manual def step_triggers_geteventsubscriptionstatus(test, rg): test.cmd('az datafactory trigger get-event-subscription-status ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -495,21 +493,19 @@ def step_triggers_geteventsubscriptionstatus(test, rg): @try_manual def step_activityruns_querybypipelinerun(test, rg): test.cmd('az datafactory activity-run query-by-pipeline-run ' - '--factory-name "{myFactoryName}" ' - '--last-updated-after "{myStartTime}" ' - '--last-updated-before "{myEndTime}" ' + '--factory-name "{myFactory}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' '--resource-group "{rg}" ' - '--run-id "{myRunId}"', - checks=[ - test.check('value[0].pipelineRunId', "{myRunId}") - ]) + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=[]) # EXAMPLE: Triggers_UnsubscribeFromEvents @try_manual def step_triggers_unsubscribefromevents(test, rg): test.cmd('az datafactory trigger unsubscribe-from-event ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -519,7 +515,7 @@ def step_triggers_unsubscribefromevents(test, rg): @try_manual def step_triggers_subscribetoevents(test, rg): test.cmd('az datafactory trigger subscribe-to-event ' - '--factory-name "{myFactoryName}" ' 
+ '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -529,7 +525,7 @@ def step_triggers_subscribetoevents(test, rg): @try_manual def step_triggers_start(test, rg): test.cmd('az datafactory trigger start ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -539,7 +535,7 @@ def step_triggers_start(test, rg): @try_manual def step_triggers_stop(test, rg): test.cmd('az datafactory trigger stop ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -549,7 +545,7 @@ def step_triggers_stop(test, rg): @try_manual def step_factories_getgithubaccesstoken(test, rg): test.cmd('az datafactory factory get-git-hub-access-token ' - '--name "{myFactoryName}" ' + '--name "{myFactory}" ' '--git-hub-access-code "some" ' '--git-hub-access-token-base-url "some" ' '--git-hub-client-id "some" ' @@ -561,26 +557,24 @@ def step_factories_getgithubaccesstoken(test, rg): @try_manual def step_factories_getdataplaneaccess(test, rg): test.cmd('az datafactory factory get-data-plane-access ' - '--name "{myFactoryName}" ' + '--name "{myFactory}" ' '--access-resource-path "" ' - '--expire-time "{myEndTime}" ' + '--expire-time "2018-11-10T09:46:20.2659347Z" ' '--permissions "r" ' '--profile-name "DefaultProfile" ' - '--start-time "{myStartTime}" ' + '--start-time "2018-11-10T02:46:20.2659347Z" ' '--resource-group "{rg}"', - checks=[ - test.check('policy.permissions', 'r') - ]) + checks=[]) # EXAMPLE: PipelineRuns_QueryByFactory @try_manual def step_pipelineruns_querybyfactory(test, rg): test.cmd('az datafactory pipeline-run query-by-factory ' - '--factory-name "{myFactoryName}" ' - '--filters operand="PipelineName" operator="Equals" values="myPipeline" ' - '--last-updated-after "{myStartTime}" ' - '--last-updated-before "{myEndTime}" ' + '--factory-name "{myFactory}" ' + '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' '--resource-group "{rg}"', checks=[]) @@ -589,9 +583,9 @@ def step_pipelineruns_querybyfactory(test, rg): @try_manual def step_pipelineruns_cancel(test, rg): test.cmd('az datafactory pipeline-run cancel ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' - '--run-id "{myRunId}"', + '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', checks=[]) @@ -599,10 +593,10 @@ def step_pipelineruns_cancel(test, rg): @try_manual def step_triggerruns_querybyfactory(test, rg): test.cmd('az datafactory trigger-run query-by-factory ' - '--factory-name "{myFactoryName}" ' - '--filters operand="TriggerName" operator="Equals" values="myTrigger" ' - '--last-updated-after "{myStartTime}" ' - '--last-updated-before "{myEndTime}" ' + '--factory-name "{myFactory}" ' + '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' '--resource-group "{rg}"', checks=[]) @@ -612,7 +606,7 @@ def step_triggerruns_querybyfactory(test, rg): def step_factories_configurefactoryrepo(test, rg): test.cmd('az datafactory factory configure-factory-repo ' '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' - 'ry/factories/{myFactoryName}" ' + 'ry/factories/{myFactory}" ' 
'--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' '--location "East US"', @@ -622,8 +616,8 @@ def step_factories_configurefactoryrepo(test, rg): # EXAMPLE: IntegrationRuntimes_Delete @try_manual def step_integrationruntimes_delete(test, rg): - test.cmd('az datafactory integration-runtime delete ' - '--factory-name "{myFactoryName}" ' + test.cmd('az datafactory integration-runtime delete -y ' + '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}"', checks=[]) @@ -632,8 +626,8 @@ def step_integrationruntimes_delete(test, rg): # EXAMPLE: Triggers_Delete @try_manual def step_triggers_delete(test, rg): - test.cmd('az datafactory trigger delete ' - '--factory-name "{myFactoryName}" ' + test.cmd('az datafactory trigger delete -y ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' '--name "{myTrigger}"', checks=[]) @@ -642,8 +636,8 @@ def step_triggers_delete(test, rg): # EXAMPLE: Pipelines_Delete @try_manual def step_pipelines_delete(test, rg): - test.cmd('az datafactory pipeline delete ' - '--factory-name "{myFactoryName}" ' + test.cmd('az datafactory pipeline delete -y ' + '--factory-name "{myFactory}" ' '--name "{myPipeline}" ' '--resource-group "{rg}"', checks=[]) @@ -652,9 +646,9 @@ def step_pipelines_delete(test, rg): # EXAMPLE: Datasets_Delete @try_manual def step_datasets_delete(test, rg): - test.cmd('az datafactory dataset delete ' + test.cmd('az datafactory dataset delete -y ' '--name "{myDataset}" ' - '--factory-name "{myFactoryName}" ' + '--factory-name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) @@ -662,8 +656,8 @@ def step_datasets_delete(test, rg): # EXAMPLE: LinkedServices_Delete @try_manual def step_linkedservices_delete(test, rg): - test.cmd('az datafactory linked-service delete ' - '--factory-name "{myFactoryName}" ' + test.cmd('az datafactory linked-service delete -y ' + '--factory-name "{myFactory}" ' '--name "{myLinkedService}" ' '--resource-group "{rg}"', checks=[]) @@ -672,17 +666,19 @@ def step_linkedservices_delete(test, rg): # EXAMPLE: Factories_Delete @try_manual def step_factories_delete(test, rg): - test.cmd('az datafactory factory delete ' - '--name "{myFactoryName}" ' + test.cmd('az datafactory factory delete -y ' + '--name "{myFactory}" ' '--resource-group "{rg}"', checks=[]) +# Env cleanup @try_manual def cleanup(test, rg): pass +# Testcase @try_manual def call_scenario(test, rg): setup(test, rg) @@ -698,7 +694,7 @@ def call_scenario(test, rg): step_triggers_update(test, rg) step_integrationruntimes_create(test, rg) step_integrationruntimes_update(test, rg) - step_integrationruntimes_createlinkedintegrationruntime(test, rg) + step_integrationruntimes_createlinkedintegrationru(test, rg) step_pipelines_createrun(test, rg) step_integrationruntimes_get(test, rg) step_reruntriggers_listbytrigger(test, rg) @@ -753,7 +749,7 @@ def call_scenario(test, rg): @try_manual class DataFactoryManagementClientScenarioTest(ScenarioTest): - @ResourceGroupPreparer(name_prefix='clitestdatafactory_myResourceGroup'[:7], key='rg', parameter_name='rg') + @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') def test_datafactory(self, rg): self.kwargs.update({ @@ -761,13 +757,15 @@ def test_datafactory(self, rg): }) self.kwargs.update({ - 'myFactoryName': 'myFactoryName', - 'myIntegrationRuntime': 'myIntegrationRuntime', - 'myLinkedService': 
'myLinkedService', - 'myDataset': 'myDataset', - 'myPipeline': 'myPipeline', - 'myTrigger': 'myTrigger', + 'myFactory': self.create_random_name(prefix='exampleFactoryName'[:9], length=18), + 'myIntegrationRuntime': self.create_random_name(prefix='exampleIntegrationRuntime'[:12], length=25), + 'myIntegrationRuntime2': 'exampleManagedIntegrationRuntime', + 'myLinkedService': self.create_random_name(prefix='exampleLinkedService'[:10], length=20), + 'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14), + 'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15), + 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), }) call_scenario(self, rg) + calc_coverage(__file__) raise_if() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py index e30baeb129d..df905149155 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py @@ -10,7 +10,7 @@ __all__ = ['DataFactoryManagementClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index b431337b6c1..84eac8676c6 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -10,6 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -61,6 +62,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index e51776c5504..ab18f1f6cb4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -33,6 +33,8 @@ from .operations import TriggerRunOperations from .operations import DataFlowOperations from .operations import DataFlowDebugSessionOperations +from .operations import ManagedVirtualNetworkOperations +from .operations import ManagedPrivateEndpointOperations from . 
import models @@ -69,6 +71,10 @@ class DataFactoryManagementClient(object): :vartype data_flow: data_factory_management_client.operations.DataFlowOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations :vartype data_flow_debug_session: data_factory_management_client.operations.DataFlowDebugSessionOperations + :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations + :vartype managed_virtual_network: data_factory_management_client.operations.ManagedVirtualNetworkOperations + :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations + :vartype managed_private_endpoint: data_factory_management_client.operations.ManagedPrivateEndpointOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. @@ -124,6 +130,10 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize) + self.managed_virtual_network = ManagedVirtualNetworkOperations( + self._client, self._config, self._serialize, self._deserialize) + self.managed_private_endpoint = ManagedPrivateEndpointOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): # type: () -> None diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py deleted file mode 100644 index c47f66669f1..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py +++ /dev/null @@ -1,9 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -VERSION = "1.0.0" diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py index 64dd1910a6a..411d6c4a66e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py @@ -10,6 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -57,6 +58,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py index d6bd9544145..b2b322686b8 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py @@ -31,6 +31,8 @@ from .operations_async import TriggerRunOperations from .operations_async import DataFlowOperations from .operations_async import DataFlowDebugSessionOperations +from .operations_async import ManagedVirtualNetworkOperations +from .operations_async import ManagedPrivateEndpointOperations from .. import models @@ -67,6 +69,10 @@ class DataFactoryManagementClient(object): :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations + :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations + :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations + :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations + :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. 
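A note for readers following the client changes in this diff: both the sync and the aio `DataFactoryManagementClient` now expose the two new operation groups as attributes (`managed_virtual_network` and `managed_private_endpoint`), alongside the existing factory/pipeline/trigger groups. The sketch below only shows how a caller would reach them; the `azure-identity` credential is an assumption and no methods on the new groups are shown, since those are not part of this hunk.

```python
# Sketch only: DefaultAzureCredential (azure-identity) is an assumption; any
# azure.core TokenCredential satisfies the client's `credential` parameter.
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

# Operation groups added by this change; they live on the same client instance
# as the existing groups (factory, pipeline, trigger, data_flow, ...).
mvnet_ops = client.managed_virtual_network
mpe_ops = client.managed_private_endpoint
```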
@@ -121,6 +127,10 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize) + self.managed_virtual_network = ManagedVirtualNetworkOperations( + self._client, self._config, self._serialize, self._deserialize) + self.managed_private_endpoint = ManagedPrivateEndpointOperations( + self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py index e679cf7ad00..554e3ba9232 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py @@ -21,6 +21,8 @@ from ._trigger_run_operations_async import TriggerRunOperations from ._data_flow_operations_async import DataFlowOperations from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations +from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations +from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations __all__ = [ 'OperationOperations', @@ -38,4 +40,6 @@ 'TriggerRunOperations', 'DataFlowOperations', 'DataFlowDebugSessionOperations', + 'ManagedVirtualNetworkOperations', + 'ManagedPrivateEndpointOperations', ] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py index f9e1d504794..0d2e56be08b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py @@ -48,7 +48,7 @@ async def query_by_pipeline_run( run_id: str, last_updated_after: datetime.datetime, last_updated_before: datetime.datetime, - continuation_token: Optional[str] = None, + continuation_token_parameter: Optional[str] = None, filters: Optional[List["models.RunQueryFilter"]] = None, order_by: Optional[List["models.RunQueryOrderBy"]] = None, **kwargs @@ -67,9 +67,9 @@ async def query_by_pipeline_run( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. 
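The `continuation_token` → `continuation_token_parameter` rename (here and in the custom commands earlier in this diff) keeps the query methods from colliding with the `continuation_token` keyword that the polling machinery reserves. A hedged sketch of calling the renamed keyword against the aio client follows; it assumes the activity-run group is exposed as `client.activity_run` and that the response carries `value`/`continuation_token` fields, neither of which is spelled out in this hunk.

```python
# Sketch under assumptions: `client` is an aio DataFactoryManagementClient and the
# activity-run operation group attribute is named `activity_run`.
import datetime


async def query_activity_runs(client, rg, factory, run_id, token=None):
    page = await client.activity_run.query_by_pipeline_run(
        resource_group_name=rg,
        factory_name=factory,
        run_id=run_id,
        last_updated_after=datetime.datetime(2018, 6, 16, 0, 36, 44),
        last_updated_before=datetime.datetime(2018, 6, 16, 0, 49, 48),
        # Renamed keyword from this change; pass the previous response's token
        # (None for the first page) to fetch the next page of results.
        continuation_token_parameter=token,
    )
    return page.value, getattr(page, "continuation_token", None)
```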
@@ -83,7 +83,7 @@ async def query_by_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -106,9 +106,8 @@ async def query_by_pipeline_run( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py index ef1d5dc530e..f1bf8ee8f73 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py @@ -12,7 +12,7 @@ from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling @@ -53,12 +53,12 @@ async def _create_initial( name: Optional[str] = None, properties: Optional["models.IntegrationRuntime"] = None, **kwargs - ) -> "models.CreateDataFlowDebugSessionResponse": - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] + ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) + request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -80,9 +80,8 @@ async def _create_initial( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send 
request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'CreateDataFlowDebugSessionRequest') + body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -107,7 +106,7 @@ async def _create_initial( return deserialized _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - async def create( + async def begin_create( self, resource_group_name: str, factory_name: str, @@ -117,7 +116,7 @@ async def create( name: Optional[str] = None, properties: Optional["models.IntegrationRuntime"] = None, **kwargs - ) -> "models.CreateDataFlowDebugSessionResponse": + ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: """Creates a data flow debug session. :param resource_group_name: The resource group name. @@ -125,10 +124,10 @@ async def create( :param factory_name: The factory name. :type factory_name: str :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. + setting in integration runtime if provided. :type compute_type: str :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. + in integration runtime if provided. :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int @@ -137,12 +136,13 @@ async def create( :param properties: Integration runtime properties. :type properties: ~data_factory_management_client.models.IntegrationRuntime :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: CreateDataFlowDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateDataFlowDebugSessionResponse + :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -151,17 +151,19 @@ async def create( 'polling_interval', self._config.polling_interval ) - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + compute_type=compute_type, + core_count=core_count, + time_to_live=time_to_live, + name=name, + properties=properties, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -176,8 +178,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore def query_by_factory( self, @@ -202,6 +212,10 @@ def query_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -215,15 +229,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -260,9 +270,8 @@ async def add_data_flow( source_settings: Optional[List["models.DataFlowSourceSetting"]] = None, parameters: 
Optional[Dict[str, object]] = None, dataset_parameters: Optional[object] = None, - folder_path: Optional[str] = None, + folder_path: Optional[object] = None, reference_name: Optional[str] = None, - parameter_value_specification_parameters: Optional[Dict[str, object]] = None, name: Optional[str] = None, properties: Optional["models.DataFlow"] = None, **kwargs @@ -285,12 +294,11 @@ async def add_data_flow( :type parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. :type dataset_parameters: object - :param folder_path: Folder path for staging blob. - :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :param reference_name: Reference LinkedService name. :type reference_name: str - :param parameter_value_specification_parameters: Arguments for LinkedService. - :type parameter_value_specification_parameters: dict[str, object] :param name: The resource name. :type name: str :param properties: Data flow properties. @@ -304,7 +312,7 @@ async def add_data_flow( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, parameters_staging_linked_service_parameters=parameter_value_specification_parameters, name=name, properties=properties) + request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -326,9 +334,8 @@ async def add_data_flow( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DataFlowDebugPackage') + body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -371,7 +378,7 @@ async def delete( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) + request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -392,9 +399,8 @@ async def delete( header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DeleteDataFlowDebugSessionRequest') + body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) 
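With this change the debug-session create and execute-command operations follow the `begin_*` long-running-operation convention and return an `AsyncLROPoller` instead of the final model. A minimal sketch of the new calling pattern, assuming an aio client instance; the compute settings are illustrative values, not requirements:

```python
# Sketch only: `client` is assumed to be an aio DataFactoryManagementClient.
async def create_debug_session(client, rg, factory):
    poller = await client.data_flow_debug_session.begin_create(
        resource_group_name=rg,
        factory_name=factory,
        compute_type="General",  # illustrative; compute_type/core_count/time_to_live are optional
        core_count=8,
        time_to_live=60,
    )
    # AsyncLROPoller: await result() to block until the session is provisioned,
    # or serialize poller.continuation_token() to resume polling later.
    response = await poller.result()
    return response
```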
@@ -418,12 +424,12 @@ async def _execute_command_initial( command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, **kwargs - ) -> "models.DataFlowDebugCommandResponse": - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"] + ) -> Optional["models.DataFlowDebugCommandResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) + request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -445,9 +451,8 @@ async def _execute_command_initial( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DataFlowDebugCommandRequest') + body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -472,7 +477,7 @@ async def _execute_command_initial( return deserialized _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore - async def execute_command( + async def begin_execute_command( self, resource_group_name: str, factory_name: str, @@ -480,7 +485,7 @@ async def execute_command( command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, **kwargs - ) -> "models.DataFlowDebugCommandResponse": + ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]: """Execute a data flow debug command. :param resource_group_name: The resource group name. @@ -494,12 +499,13 @@ async def execute_command( :param command_payload: The command payload object. :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: DataFlowDebugCommandResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowDebugCommandResponse + :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -508,15 +514,17 @@ async def execute_command( 'polling_interval', self._config.polling_interval ) - raw_result = await self._execute_command_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._execute_command_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + session_id=session_id, + command=command, + command_payload=command_payload, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -531,5 +539,13 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py index 3b97a88a76d..b5c2e5656ce 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py @@ -72,7 +72,7 @@ async def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _data_flow = models.DataFlowResource(properties=properties) + data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -97,9 +97,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_data_flow, 'DataFlowResource') + 
body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -167,7 +166,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -226,7 +224,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -263,6 +260,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -276,15 +277,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py index 80870fe14ff..a8be0369365 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py @@ -64,6 +64,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -77,15 +81,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -143,7 +143,7 @@ async def create_or_update( error_map = {404: 
ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _dataset = models.DatasetResource(properties=properties) + dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -168,9 +168,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_dataset, 'DatasetResource') + body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -196,7 +195,7 @@ async def get( dataset_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.DatasetResource": + ) -> Optional["models.DatasetResource"]: """Gets a dataset. :param resource_group_name: The resource group name. @@ -213,7 +212,7 @@ async def get( :rtype: ~data_factory_management_client.models.DatasetResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -238,7 +237,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -299,7 +297,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py index 5a17797afe8..b20acb1c3c8 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error @@ -64,7 +64,7 @@ async def get_feature_value( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -85,9 +85,8 @@ async def get_feature_value( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_exposure_control_request, 'ExposureControlRequest') + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -133,7 +132,7 @@ async def get_feature_value_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -155,9 +154,8 @@ async def get_feature_value_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_exposure_control_request, 'ExposureControlRequest') + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -175,3 +173,69 @@ async def get_feature_value_by_factory( return deserialized get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore + + async def query_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_requests: List["models.ExposureControlRequest"], + **kwargs + ) -> "models.ExposureControlBatchResponse": + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param exposure_control_requests: List of exposure control features. 
+ :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.query_feature_value_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py index 9f0749f7af8..74892302b3f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
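# --- Editor's usage sketch (not part of the generated diff) -------------------
# Calling the new batch exposure-control operation added above. The attribute name
# `exposure_control`, the import path, and the example feature name are assumptions;
# the request model fields and parameters come from the diff.
from data_factory_management_client import models  # assumed import path for the vendored models

async def query_features(client):
    batch = await client.exposure_control.query_feature_value_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        exposure_control_requests=[
            models.ExposureControlRequest(
                feature_name="ADFIntegrationRuntimeSharingRbac",
                feature_type="Feature",
            ),
        ],
    )
    return batch  # ExposureControlBatchResponse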
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -58,6 +58,10 @@ def list( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore @@ -69,15 +73,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -128,7 +128,7 @@ async def configure_factory_repo( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) + factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -149,9 +149,8 @@ async def configure_factory_repo( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory_repo_update, 'FactoryRepoUpdate') + body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -190,6 +189,10 @@ def list_by_resource_group( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_resource_group.metadata['url'] # type: ignore @@ -202,15 +205,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -247,6 +246,7 @@ async def create_or_update( identity: Optional["models.FactoryIdentity"] = None, repo_configuration: 
Optional["models.FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None, + public_network_access: Optional[Union[str, "models.PublicNetworkAccess"]] = None, **kwargs ) -> "models.Factory": """Creates or updates a factory. @@ -268,6 +268,9 @@ async def create_or_update( :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory @@ -277,7 +280,7 @@ async def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) + factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters, public_network_access=public_network_access) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -301,9 +304,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory, 'Factory') + body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -349,7 +351,7 @@ async def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) + factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -371,9 +373,8 @@ async def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory_update_parameters, 'FactoryUpdateParameters') + body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -398,7 +399,7 @@ async def get( factory_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.Factory": + ) -> Optional["models.Factory"]: """Gets a factory. :param resource_group_name: The resource group name. 
@@ -413,7 +414,7 @@ async def get( :rtype: ~data_factory_management_client.models.Factory or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -437,7 +438,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -494,7 +494,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -538,7 +537,7 @@ async def get_git_hub_access_token( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) + git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -560,9 +559,8 @@ async def get_git_hub_access_token( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_git_hub_access_token_request, 'GitHubAccessTokenRequest') + body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -621,7 +619,7 @@ async def get_data_plane_access( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) + policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -643,9 +641,8 @@ async def get_data_plane_access( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_policy, 'UserAccessPolicy') + body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = 
self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py index 1240f8957cd..a6022196653 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py @@ -87,7 +87,6 @@ async def get( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -150,7 +149,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,7 +193,7 @@ async def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) + update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -219,9 +217,8 @@ async def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -287,7 +284,6 @@ async def get_ip_address( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py index a8f7f6dd601..70df0716c21 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py @@ -11,7 +11,7 @@ from azure.core.exceptions import HttpResponseError, 
ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling @@ -48,8 +48,8 @@ async def _refresh_initial( factory_name: str, integration_runtime_name: str, **kwargs - ) -> "models.SsisObjectMetadataStatusResponse": - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"] + ) -> Optional["models.SsisObjectMetadataStatusResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -72,7 +72,6 @@ async def _refresh_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -91,13 +90,13 @@ async def _refresh_initial( return deserialized _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore - async def refresh( + async def begin_refresh( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs - ) -> "models.SsisObjectMetadataStatusResponse": + ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]: """Refresh a SSIS integration runtime object metadata. :param resource_group_name: The resource group name. @@ -107,12 +106,13 @@ async def refresh( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: SsisObjectMetadataStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SsisObjectMetadataStatusResponse + :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -121,13 +121,15 @@ async def refresh( 'polling_interval', self._config.polling_interval ) - raw_result = await self._refresh_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._refresh_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -142,8 +144,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore async def get( self, @@ -153,7 +163,8 @@ async def get( metadata_path: Optional[str] = None, **kwargs ) -> "models.SsisObjectMetadataListResponse": - """Get a SSIS integration runtime object metadata by specified path. The return is pageable metadata list. + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. :param resource_group_name: The resource group name. 
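# --- Editor's usage sketch (not part of the generated diff) -------------------
# Refreshing SSIS object metadata is now surfaced as a long-running operation that
# returns an AsyncLROPoller. The attribute name `integration_runtime_object_metadata`
# and the `client` instance are assumptions.
async def refresh_ssis_metadata(client):
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
    )
    return await poller.result()  # SsisObjectMetadataStatusResponse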
:type resource_group_name: str @@ -172,7 +183,7 @@ async def get( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) + get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -195,10 +206,9 @@ async def get( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - if _get_metadata_request is not None: - body_content = self._serialize.body(_get_metadata_request, 'GetSsisObjectMetadataRequest') + if get_metadata_request is not None: + body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') else: body_content = None body_content_kwargs['content'] = body_content diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py index 66326394ac2..82b285c7a74 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py @@ -12,7 +12,7 @@ from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling @@ -66,6 +66,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -79,15 +83,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -145,7 +145,7 @@ async def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _integration_runtime = models.IntegrationRuntimeResource(properties=properties) + integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ 
-170,9 +170,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_integration_runtime, 'IntegrationRuntimeResource') + body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -198,7 +197,7 @@ async def get( integration_runtime_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.IntegrationRuntimeResource": + ) -> Optional["models.IntegrationRuntimeResource"]: """Gets an integration runtime. :param resource_group_name: The resource group name. @@ -216,7 +215,7 @@ async def get( :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -241,7 +240,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -292,7 +290,7 @@ async def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) + update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -315,9 +313,8 @@ async def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -378,7 +375,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -435,7 +431,6 @@ async def get_status( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -459,7 +454,8 @@ async def get_connection_info( integration_runtime_name: str, **kwargs ) -> "models.IntegrationRuntimeConnectionInfo": - """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. + """Gets the on-premises integration runtime connection information for encrypting the on-premises + data source credentials. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -495,7 +491,6 @@ async def get_connection_info( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -539,7 +534,7 @@ async def regenerate_auth_key( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) + regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -562,9 +557,8 @@ async def regenerate_auth_key( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -626,7 +620,6 @@ async def list_auth_key( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -649,8 +642,8 @@ async def _start_initial( factory_name: str, integration_runtime_name: str, **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + ) -> Optional["models.IntegrationRuntimeStatusResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -673,7 +666,6 @@ async def _start_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -692,13 +684,13 @@ async def _start_initial( return deserialized _start_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - async def start( + async def begin_start( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": + ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]: """Starts a ManagedReserved type integration runtime. :param resource_group_name: The resource group name. @@ -708,12 +700,13 @@ async def start( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -722,13 +715,15 @@ async def start( 'polling_interval', self._config.polling_interval ) - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -743,8 +738,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore async def _stop_initial( self, @@ -775,7 +778,6 @@ async def _stop_initial( # Construct headers 
header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -789,13 +791,13 @@ async def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - async def stop( + async def begin_stop( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs - ) -> None: + ) -> AsyncLROPoller[None]: """Stops a ManagedReserved type integration runtime. :param resource_group_name: The resource group name. @@ -805,12 +807,13 @@ async def stop( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: None, or the result of cls(response) - :rtype: None + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -819,13 +822,15 @@ async def stop( 'polling_interval', self._config.polling_interval ) - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -837,8 +842,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore async def 
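# --- Editor's usage sketch (not part of the generated diff) -------------------
# Starting and stopping a ManagedReserved integration runtime now go through the
# renamed begin_* LRO methods shown above. The attribute name `integration_runtime`
# and the `client` instance are assumptions.
async def cycle_integration_runtime(client):
    start_poller = await client.integration_runtime.begin_start(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleManagedIntegrationRuntime",
    )
    status = await start_poller.result()  # IntegrationRuntimeStatusResponse

    stop_poller = await client.integration_runtime.begin_stop(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleManagedIntegrationRuntime",
    )
    await stop_poller.result()            # completes with None
    return status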
sync_credentials( self, @@ -847,7 +860,10 @@ async def sync_credentials( integration_runtime_name: str, **kwargs ) -> None: - """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node than using this API directly. + """Force the integration runtime to synchronize credentials across integration runtime nodes, and + this will override the credentials across all worker nodes with those available on the + dispatcher node. If you already have the latest credential backup file, you should manually + import it (preferred) on any self-hosted integration runtime node than using this API directly. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -882,7 +898,6 @@ async def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -903,7 +918,8 @@ async def get_monitoring_data( integration_runtime_name: str, **kwargs ) -> "models.IntegrationRuntimeMonitoringData": - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes + under this integration runtime. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -939,7 +955,6 @@ async def get_monitoring_data( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -998,7 +1013,6 @@ async def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1020,7 +1034,8 @@ async def remove_link( linked_factory_name: str, **kwargs ) -> None: - """Remove all linked integration runtimes under specific data factory in a self-hosted integration runtime. + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. :param resource_group_name: The resource group name. 
:type resource_group_name: str @@ -1039,7 +1054,7 @@ async def remove_link( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) + linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -1061,9 +1076,8 @@ async def remove_link( header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -1118,7 +1132,7 @@ async def create_linked_integration_runtime( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) + create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -1141,9 +1155,8 @@ async def create_linked_integration_runtime( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py index 7da8d693512..56e9e6f663a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py @@ -64,6 +64,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -77,15 +81,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", 
api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -143,7 +143,7 @@ async def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _linked_service = models.LinkedServiceResource(properties=properties) + linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -168,9 +168,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') + body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -196,7 +195,7 @@ async def get( linked_service_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.LinkedServiceResource": + ) -> Optional["models.LinkedServiceResource"]: """Gets a linked service. :param resource_group_name: The resource group name. @@ -214,7 +213,7 @@ async def get( :rtype: ~data_factory_management_client.models.LinkedServiceResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -239,7 +238,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -300,7 +298,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py new file mode 100644 index 00000000000..3a899779963 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py @@ -0,0 +1,336 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagedPrivateEndpointOperations: + """ManagedPrivateEndpointOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + **kwargs + ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]: + """Lists managed private endpoints. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. 
+ :type managed_virtual_network_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + if_match: Optional[str] = None, + connection_state: Optional["models.ConnectionStateProperties"] = None, + fqdns: Optional[List[str]] = None, + group_id: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + **kwargs + ) -> "models.ManagedPrivateEndpointResource": + 
"""Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :type private_link_resource_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = 
self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedPrivateEndpointResource": + """Gets a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + **kwargs + ) -> None: + """Deletes a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. 
+ :type managed_private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py new file mode 100644 index 00000000000..2152988d7ef --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py @@ -0,0 +1,255 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
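A minimal usage sketch for the ManagedPrivateEndpointOperations added above, assuming the generated client exposes the operation group as an attribute named managed_private_endpoint (the attribute name does not appear in this diff); parameter names follow the signatures in the generated file, and all resource names and IDs are illustrative.

# Sketch only: client.managed_private_endpoint is an assumed attribute name, and the
# "default" network, endpoint name, groupId and target resource are illustrative values.
async def ensure_private_endpoint(client, resource_group_name: str, factory_name: str) -> None:
    created = await client.managed_private_endpoint.create_or_update(
        resource_group_name,
        factory_name,
        managed_virtual_network_name="default",
        managed_private_endpoint_name="exampleManagedPrivateEndpoint",
        group_id="blob",
        private_link_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
            "exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleAccount"
        ),
        fqdns=["exampleaccount.blob.core.windows.net"],
    )
    print(created.id)

    # list_by_factory returns an AsyncItemPaged scoped to one managed virtual network.
    async for endpoint in client.managed_private_endpoint.list_by_factory(
        resource_group_name, factory_name, "default"
    ):
        print(endpoint.name)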
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagedVirtualNetworkOperations: + """ManagedVirtualNetworkOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]: + """Lists managed Virtual Networks. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + properties: "models.ManagedVirtualNetwork", + if_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedVirtualNetworkResource": + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param properties: Managed Virtual Network properties. 
+ :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedVirtualNetworkResource": + """Gets a managed 
Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py index 434c8c20e11..83206d77039 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py @@ -58,6 +58,10 @@ def list( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore @@ -65,15 +69,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py index b6446d80d3d..34c7453f951 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py @@ -64,6 +64,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -77,15 +81,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -166,7 +166,6 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content @@ -194,7 +193,7 @@ async def get( pipeline_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.PipelineResource": + ) -> Optional["models.PipelineResource"]: """Gets a pipeline. :param resource_group_name: The resource group name. 
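A hedged sketch of calling the updated async pipeline operations above: get() is now typed as Optional, so a request with if_none_match can legitimately return no body, and list_by_factory is consumed with async for. The pipeline attribute name on the client is an assumption, as are the example pipeline name and ETag value.

# Sketch only: client.pipeline is an assumed attribute name; get() may return None
# when the supplied If-None-Match ETag still matches the server-side entity.
async def show_pipelines(client, resource_group_name: str, factory_name: str) -> None:
    async for pipeline in client.pipeline.list_by_factory(resource_group_name, factory_name):
        print(pipeline.name)

    resource = await client.pipeline.get(
        resource_group_name,
        factory_name,
        "examplePipeline",
        if_none_match='"example-etag"',
    )
    if resource is None:
        print("Pipeline unchanged since the cached ETag; nothing to refresh.")
    else:
        print(resource.id)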
@@ -211,7 +210,7 @@ async def get( :rtype: ~data_factory_management_client.models.PipelineResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -236,7 +235,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -297,7 +295,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -384,7 +381,6 @@ async def create_run( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: body_content = self._serialize.body(parameters, '{object}') diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py index c475e2ba041..5cdfd09fe01 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py @@ -47,7 +47,7 @@ async def query_by_factory( factory_name: str, last_updated_after: datetime.datetime, last_updated_before: datetime.datetime, - continuation_token: Optional[str] = None, + continuation_token_parameter: Optional[str] = None, filters: Optional[List["models.RunQueryFilter"]] = None, order_by: Optional[List["models.RunQueryOrderBy"]] = None, **kwargs @@ -64,9 +64,9 @@ async def query_by_factory( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. 
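A hedged sketch of paging pipeline runs with the renamed continuation_token_parameter shown in the hunk above; the pipeline_run attribute name and the value/continuation_token fields on the query response are assumptions based on the RunFilterParameters pattern rather than anything confirmed by this diff.

# Sketch only: client.pipeline_run, response.value and response.continuation_token are
# assumed names; the continuation_token_parameter keyword itself comes from the diff above.
import datetime

async def list_recent_runs(client, resource_group_name: str, factory_name: str) -> None:
    last_updated_after = datetime.datetime(2018, 11, 10, tzinfo=datetime.timezone.utc)
    last_updated_before = last_updated_after + datetime.timedelta(days=1)

    token = None
    while True:
        response = await client.pipeline_run.query_by_factory(
            resource_group_name,
            factory_name,
            last_updated_after=last_updated_after,
            last_updated_before=last_updated_before,
            continuation_token_parameter=token,  # renamed from continuation_token
        )
        for run in response.value:  # run fields (run_id, status) are assumed
            print(run.run_id, run.status)
        token = response.continuation_token
        if not token:
            break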
@@ -80,7 +80,7 @@ async def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -102,9 +102,8 @@ async def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -166,7 +165,6 @@ async def get( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -231,7 +229,6 @@ async def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py index 76cb6883eac..f4669b45bc2 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py @@ -12,7 +12,7 @@ from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling @@ -66,6 +66,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -79,15 +83,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) 
else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): @@ -118,7 +118,7 @@ async def query_by_factory( self, resource_group_name: str, factory_name: str, - continuation_token: Optional[str] = None, + continuation_token_parameter: Optional[str] = None, parent_trigger_name: Optional[str] = None, **kwargs ) -> "models.TriggerQueryResponse": @@ -128,9 +128,9 @@ async def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun triggers. :type parent_trigger_name: str @@ -143,7 +143,7 @@ async def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -165,9 +165,8 @@ async def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'TriggerFilterParameters') + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -217,7 +216,7 @@ async def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _trigger = models.TriggerResource(properties=properties) + trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -242,9 +241,8 @@ async def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_trigger, 'TriggerResource') + body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -270,7 +268,7 @@ async def get( trigger_name: str, if_none_match: Optional[str] = None, **kwargs - ) -> "models.TriggerResource": + ) -> Optional["models.TriggerResource"]: """Gets a trigger. 
:param resource_group_name: The resource group name. @@ -287,7 +285,7 @@ async def get( :rtype: ~data_factory_management_client.models.TriggerResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -312,7 +310,6 @@ async def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -373,7 +370,6 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -393,8 +389,8 @@ async def _subscribe_to_event_initial( factory_name: str, trigger_name: str, **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + ) -> Optional["models.TriggerSubscriptionOperationStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -417,7 +413,6 @@ async def _subscribe_to_event_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -436,13 +431,13 @@ async def _subscribe_to_event_initial( return deserialized _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - async def subscribe_to_event( + async def begin_subscribe_to_event( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": + ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: """Subscribe event trigger to events. :param resource_group_name: The resource group name. @@ -452,12 +447,13 @@ async def subscribe_to_event( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -466,13 +462,15 @@ async def subscribe_to_event( 'polling_interval', self._config.polling_interval ) - raw_result = await self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._subscribe_to_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -487,8 +485,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore async def get_event_subscription_status( self, @@ -533,7 +539,6 @@ async def get_event_subscription_status( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -556,8 +561,8 @@ async def _unsubscribe_from_event_initial( factory_name: str, trigger_name: str, **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + ) -> Optional["models.TriggerSubscriptionOperationStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -580,7 +585,6 @@ async def _unsubscribe_from_event_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = 
await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -599,13 +603,13 @@ async def _unsubscribe_from_event_initial( return deserialized _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - async def unsubscribe_from_event( + async def begin_unsubscribe_from_event( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": + ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: """Unsubscribe event trigger from events. :param resource_group_name: The resource group name. @@ -615,12 +619,13 @@ async def unsubscribe_from_event( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -629,13 +634,15 @@ async def unsubscribe_from_event( 'polling_interval', self._config.polling_interval ) - raw_result = await self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._unsubscribe_from_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -650,8 +657,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_event.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore async def _start_initial( self, @@ -682,7 +697,6 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -696,13 +710,13 @@ async def _start_initial( _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - async def start( + async def begin_start( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs - ) -> None: + ) -> AsyncLROPoller[None]: """Starts a trigger. :param resource_group_name: The resource group name. @@ -712,12 +726,13 @@ async def start( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: None, or the result of cls(response) - :rtype: None + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -726,13 +741,15 @@ async def start( 'polling_interval', self._config.polling_interval ) - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -744,8 +761,16 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore async def _stop_initial( self, @@ -776,7 +801,6 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -790,13 +814,13 @@ async def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - async def stop( + async def begin_stop( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs - ) -> None: + ) -> AsyncLROPoller[None]: """Stops a trigger. :param resource_group_name: The resource group name. @@ -806,12 +830,13 @@ async def stop( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: None, or the result of cls(response) - :rtype: None + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -820,13 +845,15 @@ async def stop( 'polling_interval', self._config.polling_interval ) - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -838,5 +865,13 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling - return await async_poller(self._client, raw_result, get_long_running_output, polling_method) - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': 
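Editorial note: the three hunks above convert `unsubscribe_from_event`, `start`, and `stop` into `begin_*` long-running operations that return an `AsyncLROPoller` and accept a `continuation_token` keyword. The sketch below shows how a caller might use the renamed methods; it is not part of the patch, and the `client` object and the `trigger` operation-group attribute name are assumptions.

```python
async def restart_trigger(client, resource_group: str, factory: str, trigger: str) -> None:
    """Sketch only: stop and restart a trigger with the new begin_* pollers.

    `client` is assumed to be an already-constructed async client from this
    vendored SDK; the `trigger` attribute name is an assumption.
    """
    # begin_stop now returns an AsyncLROPoller[None] instead of awaiting completion itself.
    stop_poller = await client.trigger.begin_stop(resource_group, factory, trigger)

    # The poller state can be persisted and the same operation resumed later via
    # the continuation_token keyword added in this patch.
    token = stop_poller.continuation_token()
    resumed = await client.trigger.begin_stop(
        resource_group, factory, trigger, continuation_token=token
    )
    await resumed.result()  # wait for the stop LRO to finish

    start_poller = await client.trigger.begin_start(resource_group, factory, trigger)
    await start_poller.result()
```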
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py index 5830d27971d..3401f9c95c1 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py @@ -87,7 +87,6 @@ async def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -101,13 +100,72 @@ async def rerun( rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + async def cancel( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + run_id: str, + **kwargs + ) -> None: + """Cancel a single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + async def query_by_factory( self, resource_group_name: str, factory_name: str, last_updated_after: datetime.datetime, last_updated_before: datetime.datetime, - continuation_token: Optional[str] = None, + continuation_token_parameter: Optional[str] = None, filters: Optional[List["models.RunQueryFilter"]] = None, order_by: Optional[List["models.RunQueryOrderBy"]] = None, **kwargs @@ -124,9 +182,9 @@ async def query_by_factory( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. @@ -140,7 +198,7 @@ async def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -162,9 +220,8 @@ async def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 00bfbf9df23..4d780f2444c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -31,6 +31,7 @@ from ._models_py3 import AvroSink from ._models_py3 import AvroSource from ._models_py3 import AvroWriteSettings + from ._models_py3 import AzPowerShellSetup from ._models_py3 import AzureBatchLinkedService from ._models_py3 import AzureBlobDataset from ._models_py3 import AzureBlobFsDataset @@ -57,10 +58,17 @@ from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from 
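Editorial note: the `_trigger_run_operations_async.py` hunks above add a `cancel` operation for a single trigger run and rename the `continuation_token` parameter of `query_by_factory` to `continuation_token_parameter`, so it no longer collides with the LRO `continuation_token` keyword. A hedged usage sketch follows; client construction, the `trigger_run` attribute name, and the response's `continuation_token` field are assumptions drawn from the generated models, not shown in this patch.

```python
import datetime

async def cancel_and_page_runs(client, resource_group: str, factory: str,
                               trigger: str, run_id: str):
    """Sketch only: cancel one trigger run, then page query results."""
    # New operation added by this patch: POST .../triggerRuns/{runId}/cancel
    await client.trigger_run.cancel(resource_group, factory, trigger, run_id)

    window_start = datetime.datetime(2020, 6, 1, tzinfo=datetime.timezone.utc)
    window_end = datetime.datetime(2020, 6, 2, tzinfo=datetime.timezone.utc)
    page = await client.trigger_run.query_by_factory(
        resource_group, factory,
        last_updated_after=window_start,
        last_updated_before=window_end,
    )
    # The query response is assumed to expose a continuation_token; pass it back
    # through the renamed keyword to fetch the next page.
    if page.continuation_token:
        page = await client.trigger_run.query_by_factory(
            resource_group, factory,
            last_updated_after=window_start,
            last_updated_before=window_end,
            continuation_token_parameter=page.continuation_token,
        )
    return page
```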
._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureFileStorageLinkedService from ._models_py3 import AzureFileStorageLocation from ._models_py3 import AzureFileStorageReadSettings + from ._models_py3 import AzureFileStorageWriteSettings from ._models_py3 import AzureFunctionActivity from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService @@ -74,26 +82,26 @@ from ._models_py3 import AzureMlServiceLinkedService from ._models_py3 import AzureMlUpdateResourceActivity from ._models_py3 import AzureMlWebServiceFile - from ._models_py3 import AzureMySqlLinkedService - from ._models_py3 import AzureMySqlSink - from ._models_py3 import AzureMySqlSource - from ._models_py3 import AzureMySqlTableDataset - from ._models_py3 import AzurePostgreSqlLinkedService - from ._models_py3 import AzurePostgreSqlSink - from ._models_py3 import AzurePostgreSqlSource - from ._models_py3 import AzurePostgreSqlTableDataset + from ._models_py3 import AzureMySQLLinkedService + from ._models_py3 import AzureMySQLSink + from ._models_py3 import AzureMySQLSource + from ._models_py3 import AzureMySQLTableDataset + from ._models_py3 import AzurePostgreSQLLinkedService + from ._models_py3 import AzurePostgreSQLSink + from ._models_py3 import AzurePostgreSQLSource + from ._models_py3 import AzurePostgreSQLTableDataset from ._models_py3 import AzureQueueSink + from ._models_py3 import AzureSQLDWLinkedService + from ._models_py3 import AzureSQLDWTableDataset + from ._models_py3 import AzureSQLDatabaseLinkedService + from ._models_py3 import AzureSQLMiLinkedService + from ._models_py3 import AzureSQLMiTableDataset + from ._models_py3 import AzureSQLSink + from ._models_py3 import AzureSQLSource + from ._models_py3 import AzureSQLTableDataset from ._models_py3 import AzureSearchIndexDataset from ._models_py3 import AzureSearchIndexSink from ._models_py3 import AzureSearchLinkedService - from ._models_py3 import AzureSqlDWLinkedService - from ._models_py3 import AzureSqlDWTableDataset - from ._models_py3 import AzureSqlDatabaseLinkedService - from ._models_py3 import AzureSqlMiLinkedService - from ._models_py3 import AzureSqlMiTableDataset - from ._models_py3 import AzureSqlSink - from ._models_py3 import AzureSqlSource - from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService from ._models_py3 import AzureTableDataset from ._models_py3 import AzureTableSink @@ -122,8 +130,10 @@ from ._models_py3 import ConcurLinkedService from ._models_py3 import ConcurObjectDataset from ._models_py3 import ConcurSource + from ._models_py3 import ConnectionStateProperties from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource from ._models_py3 import CopyTranslator @@ -132,9 +142,9 @@ from ._models_py3 import CosmosDBMongoDBApiLinkedService from ._models_py3 import CosmosDBMongoDBApiSink from ._models_py3 import CosmosDBMongoDBApiSource - from ._models_py3 import CosmosDBSqlApiCollectionDataset - from ._models_py3 import CosmosDBSqlApiSink - from ._models_py3 import CosmosDBSqlApiSource + 
from ._models_py3 import CosmosDBSQLApiCollectionDataset + from ._models_py3 import CosmosDBSQLApiSink + from ._models_py3 import CosmosDBSQLApiSource from ._models_py3 import CouchbaseLinkedService from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset @@ -182,6 +192,8 @@ from ._models_py3 import DatasetResource from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -225,6 +237,8 @@ from ._models_py3 import ExecuteSsisPackageActivity from ._models_py3 import ExecutionActivity from ._models_py3 import ExportSettings + from ._models_py3 import ExposureControlBatchRequest + from ._models_py3 import ExposureControlBatchResponse from ._models_py3 import ExposureControlRequest from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression @@ -342,6 +356,8 @@ from ._models_py3 import LinkedServiceListResponse from ._models_py3 import LinkedServiceReference from ._models_py3 import LinkedServiceResource + from ._models_py3 import LogLocationSettings + from ._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -352,6 +368,12 @@ from ._models_py3 import ManagedIntegrationRuntimeNode from ._models_py3 import ManagedIntegrationRuntimeOperationResult from ._models_py3 import ManagedIntegrationRuntimeStatus + from ._models_py3 import ManagedPrivateEndpoint + from ._models_py3 import ManagedPrivateEndpointListResponse + from ._models_py3 import ManagedPrivateEndpointResource + from ._models_py3 import ManagedVirtualNetwork + from ._models_py3 import ManagedVirtualNetworkListResponse + from ._models_py3 import ManagedVirtualNetworkResource from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService from ._models_py3 import MariaDBSource @@ -363,6 +385,9 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset + from ._models_py3 import MongoDBAtlasCollectionDataset + from ._models_py3 import MongoDBAtlasLinkedService + from ._models_py3 import MongoDBAtlasSource from ._models_py3 import MongoDBCollectionDataset from ._models_py3 import MongoDBCursorMethodsProperties from ._models_py3 import MongoDBLinkedService @@ -371,9 +396,9 @@ from ._models_py3 import MongoDBV2LinkedService from ._models_py3 import MongoDBV2Source from ._models_py3 import MultiplePipelineTrigger - from ._models_py3 import MySqlLinkedService - from ._models_py3 import MySqlSource - from ._models_py3 import MySqlTableDataset + from ._models_py3 import MySQLLinkedService + from ._models_py3 import MySQLSource + from ._models_py3 import MySQLTableDataset from ._models_py3 import NetezzaLinkedService from ._models_py3 import NetezzaPartitionSettings from ._models_py3 import NetezzaSource @@ -408,12 +433,14 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import 
ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -427,9 +454,9 @@ from ._models_py3 import PipelineRunInvokedBy from ._models_py3 import PipelineRunsQueryResponse from ._models_py3 import PolybaseSettings - from ._models_py3 import PostgreSqlLinkedService - from ._models_py3 import PostgreSqlSource - from ._models_py3 import PostgreSqlTableDataset + from ._models_py3 import PostgreSQLLinkedService + from ._models_py3 import PostgreSQLSource + from ._models_py3 import PostgreSQLTableDataset from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset from ._models_py3 import PrestoSource @@ -450,11 +477,24 @@ from ._models_py3 import ResponsysSource from ._models_py3 import RestResourceDataset from ._models_py3 import RestServiceLinkedService + from ._models_py3 import RestSink from ._models_py3 import RestSource from ._models_py3 import RetryPolicy from ._models_py3 import RunFilterParameters from ._models_py3 import RunQueryFilter from ._models_py3 import RunQueryOrderBy + from ._models_py3 import SQLDWSink + from ._models_py3 import SQLDWSource + from ._models_py3 import SQLMiSink + from ._models_py3 import SQLMiSource + from ._models_py3 import SQLPartitionSettings + from ._models_py3 import SQLServerLinkedService + from ._models_py3 import SQLServerSink + from ._models_py3 import SQLServerSource + from ._models_py3 import SQLServerStoredProcedureActivity + from ._models_py3 import SQLServerTableDataset + from ._models_py3 import SQLSink + from ._models_py3 import SQLSource from ._models_py3 import SalesforceLinkedService from ._models_py3 import SalesforceMarketingCloudLinkedService from ._models_py3 import SalesforceMarketingCloudObjectDataset @@ -520,18 +560,6 @@ from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource - from ._models_py3 import SqlDWSink - from ._models_py3 import SqlDWSource - from ._models_py3 import SqlMiSink - from ._models_py3 import SqlMiSource - from ._models_py3 import SqlPartitionSettings - from ._models_py3 import SqlServerLinkedService - from ._models_py3 import SqlServerSink - from ._models_py3 import SqlServerSource - from ._models_py3 import SqlServerStoredProcedureActivity - from ._models_py3 import SqlServerTableDataset - from ._models_py3 import SqlSink - from ._models_py3 import SqlSource from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset from ._models_py3 import SquareSource @@ -565,6 +593,8 @@ from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -641,6 +671,7 @@ from ._models import AvroSink # type: ignore from ._models import AvroSource # type: ignore from ._models import AvroWriteSettings # type: ignore + from ._models import AzPowerShellSetup # type: ignore from ._models import AzureBatchLinkedService # type: ignore from ._models import AzureBlobDataset # type: ignore from ._models import AzureBlobFsDataset # type: ignore @@ -667,10 +698,17 @@ from ._models import AzureDataLakeStoreSink # type: ignore from ._models import AzureDataLakeStoreSource # type: ignore from 
._models import AzureDataLakeStoreWriteSettings # type: ignore + from ._models import AzureDatabricksDeltaLakeDataset # type: ignore + from ._models import AzureDatabricksDeltaLakeExportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeImportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeLinkedService # type: ignore + from ._models import AzureDatabricksDeltaLakeSink # type: ignore + from ._models import AzureDatabricksDeltaLakeSource # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore from ._models import AzureFileStorageLocation # type: ignore from ._models import AzureFileStorageReadSettings # type: ignore + from ._models import AzureFileStorageWriteSettings # type: ignore from ._models import AzureFunctionActivity # type: ignore from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore @@ -684,26 +722,26 @@ from ._models import AzureMlServiceLinkedService # type: ignore from ._models import AzureMlUpdateResourceActivity # type: ignore from ._models import AzureMlWebServiceFile # type: ignore - from ._models import AzureMySqlLinkedService # type: ignore - from ._models import AzureMySqlSink # type: ignore - from ._models import AzureMySqlSource # type: ignore - from ._models import AzureMySqlTableDataset # type: ignore - from ._models import AzurePostgreSqlLinkedService # type: ignore - from ._models import AzurePostgreSqlSink # type: ignore - from ._models import AzurePostgreSqlSource # type: ignore - from ._models import AzurePostgreSqlTableDataset # type: ignore + from ._models import AzureMySQLLinkedService # type: ignore + from ._models import AzureMySQLSink # type: ignore + from ._models import AzureMySQLSource # type: ignore + from ._models import AzureMySQLTableDataset # type: ignore + from ._models import AzurePostgreSQLLinkedService # type: ignore + from ._models import AzurePostgreSQLSink # type: ignore + from ._models import AzurePostgreSQLSource # type: ignore + from ._models import AzurePostgreSQLTableDataset # type: ignore from ._models import AzureQueueSink # type: ignore + from ._models import AzureSQLDWLinkedService # type: ignore + from ._models import AzureSQLDWTableDataset # type: ignore + from ._models import AzureSQLDatabaseLinkedService # type: ignore + from ._models import AzureSQLMiLinkedService # type: ignore + from ._models import AzureSQLMiTableDataset # type: ignore + from ._models import AzureSQLSink # type: ignore + from ._models import AzureSQLSource # type: ignore + from ._models import AzureSQLTableDataset # type: ignore from ._models import AzureSearchIndexDataset # type: ignore from ._models import AzureSearchIndexSink # type: ignore from ._models import AzureSearchLinkedService # type: ignore - from ._models import AzureSqlDWLinkedService # type: ignore - from ._models import AzureSqlDWTableDataset # type: ignore - from ._models import AzureSqlDatabaseLinkedService # type: ignore - from ._models import AzureSqlMiLinkedService # type: ignore - from ._models import AzureSqlMiTableDataset # type: ignore - from ._models import AzureSqlSink # type: ignore - from ._models import AzureSqlSource # type: ignore - from ._models import AzureSqlTableDataset # type: ignore from ._models import AzureStorageLinkedService # type: ignore from ._models import AzureTableDataset # type: ignore from ._models import AzureTableSink # type: ignore @@ -732,8 +770,10 @@ 
from ._models import ConcurLinkedService # type: ignore from ._models import ConcurObjectDataset # type: ignore from ._models import ConcurSource # type: ignore + from ._models import ConnectionStateProperties # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore + from ._models import CopyActivityLogSettings # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore from ._models import CopyTranslator # type: ignore @@ -742,9 +782,9 @@ from ._models import CosmosDBMongoDBApiLinkedService # type: ignore from ._models import CosmosDBMongoDBApiSink # type: ignore from ._models import CosmosDBMongoDBApiSource # type: ignore - from ._models import CosmosDBSqlApiCollectionDataset # type: ignore - from ._models import CosmosDBSqlApiSink # type: ignore - from ._models import CosmosDBSqlApiSource # type: ignore + from ._models import CosmosDBSQLApiCollectionDataset # type: ignore + from ._models import CosmosDBSQLApiSink # type: ignore + from ._models import CosmosDBSQLApiSource # type: ignore from ._models import CouchbaseLinkedService # type: ignore from ._models import CouchbaseSource # type: ignore from ._models import CouchbaseTableDataset # type: ignore @@ -792,6 +832,8 @@ from ._models import DatasetResource # type: ignore from ._models import DatasetSchemaDataElement # type: ignore from ._models import DatasetStorageFormat # type: ignore + from ._models import DatasetTarCompression # type: ignore + from ._models import DatasetTarGZipCompression # type: ignore from ._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore from ._models import Db2Source # type: ignore @@ -835,6 +877,8 @@ from ._models import ExecuteSsisPackageActivity # type: ignore from ._models import ExecutionActivity # type: ignore from ._models import ExportSettings # type: ignore + from ._models import ExposureControlBatchRequest # type: ignore + from ._models import ExposureControlBatchResponse # type: ignore from ._models import ExposureControlRequest # type: ignore from ._models import ExposureControlResponse # type: ignore from ._models import Expression # type: ignore @@ -952,6 +996,8 @@ from ._models import LinkedServiceListResponse # type: ignore from ._models import LinkedServiceReference # type: ignore from ._models import LinkedServiceResource # type: ignore + from ._models import LogLocationSettings # type: ignore + from ._models import LogSettings # type: ignore from ._models import LogStorageSettings # type: ignore from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore @@ -962,6 +1008,12 @@ from ._models import ManagedIntegrationRuntimeNode # type: ignore from ._models import ManagedIntegrationRuntimeOperationResult # type: ignore from ._models import ManagedIntegrationRuntimeStatus # type: ignore + from ._models import ManagedPrivateEndpoint # type: ignore + from ._models import ManagedPrivateEndpointListResponse # type: ignore + from ._models import ManagedPrivateEndpointResource # type: ignore + from ._models import ManagedVirtualNetwork # type: ignore + from ._models import ManagedVirtualNetworkListResponse # type: ignore + from ._models import ManagedVirtualNetworkResource # type: ignore from ._models import MappingDataFlow # type: ignore from ._models import MariaDBLinkedService # type: ignore from ._models import MariaDBSource # type: ignore @@ -973,6 +1025,9 @@ from ._models import 
MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore + from ._models import MongoDBAtlasCollectionDataset # type: ignore + from ._models import MongoDBAtlasLinkedService # type: ignore + from ._models import MongoDBAtlasSource # type: ignore from ._models import MongoDBCollectionDataset # type: ignore from ._models import MongoDBCursorMethodsProperties # type: ignore from ._models import MongoDBLinkedService # type: ignore @@ -981,9 +1036,9 @@ from ._models import MongoDBV2LinkedService # type: ignore from ._models import MongoDBV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore - from ._models import MySqlLinkedService # type: ignore - from ._models import MySqlSource # type: ignore - from ._models import MySqlTableDataset # type: ignore + from ._models import MySQLLinkedService # type: ignore + from ._models import MySQLSource # type: ignore + from ._models import MySQLTableDataset # type: ignore from ._models import NetezzaLinkedService # type: ignore from ._models import NetezzaPartitionSettings # type: ignore from ._models import NetezzaSource # type: ignore @@ -1018,12 +1073,14 @@ from ._models import OrcFormat # type: ignore from ._models import OrcSink # type: ignore from ._models import OrcSource # type: ignore + from ._models import OrcWriteSettings # type: ignore from ._models import PackageStore # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore from ._models import ParquetFormat # type: ignore from ._models import ParquetSink # type: ignore from ._models import ParquetSource # type: ignore + from ._models import ParquetWriteSettings # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore from ._models import PaypalSource # type: ignore @@ -1037,9 +1094,9 @@ from ._models import PipelineRunInvokedBy # type: ignore from ._models import PipelineRunsQueryResponse # type: ignore from ._models import PolybaseSettings # type: ignore - from ._models import PostgreSqlLinkedService # type: ignore - from ._models import PostgreSqlSource # type: ignore - from ._models import PostgreSqlTableDataset # type: ignore + from ._models import PostgreSQLLinkedService # type: ignore + from ._models import PostgreSQLSource # type: ignore + from ._models import PostgreSQLTableDataset # type: ignore from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore from ._models import PrestoSource # type: ignore @@ -1060,11 +1117,24 @@ from ._models import ResponsysSource # type: ignore from ._models import RestResourceDataset # type: ignore from ._models import RestServiceLinkedService # type: ignore + from ._models import RestSink # type: ignore from ._models import RestSource # type: ignore from ._models import RetryPolicy # type: ignore from ._models import RunFilterParameters # type: ignore from ._models import RunQueryFilter # type: ignore from ._models import RunQueryOrderBy # type: ignore + from ._models import SQLDWSink # type: ignore + from ._models import SQLDWSource # type: ignore + from ._models import SQLMiSink # type: ignore + from ._models import SQLMiSource # type: ignore + from ._models import SQLPartitionSettings # type: ignore + from ._models import SQLServerLinkedService # type: ignore + from ._models import SQLServerSink # type: ignore + from ._models import 
SQLServerSource # type: ignore + from ._models import SQLServerStoredProcedureActivity # type: ignore + from ._models import SQLServerTableDataset # type: ignore + from ._models import SQLSink # type: ignore + from ._models import SQLSource # type: ignore from ._models import SalesforceLinkedService # type: ignore from ._models import SalesforceMarketingCloudLinkedService # type: ignore from ._models import SalesforceMarketingCloudObjectDataset # type: ignore @@ -1130,18 +1200,6 @@ from ._models import SparkLinkedService # type: ignore from ._models import SparkObjectDataset # type: ignore from ._models import SparkSource # type: ignore - from ._models import SqlDWSink # type: ignore - from ._models import SqlDWSource # type: ignore - from ._models import SqlMiSink # type: ignore - from ._models import SqlMiSource # type: ignore - from ._models import SqlPartitionSettings # type: ignore - from ._models import SqlServerLinkedService # type: ignore - from ._models import SqlServerSink # type: ignore - from ._models import SqlServerSource # type: ignore - from ._models import SqlServerStoredProcedureActivity # type: ignore - from ._models import SqlServerTableDataset # type: ignore - from ._models import SqlSink # type: ignore - from ._models import SqlSource # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore from ._models import SquareSource # type: ignore @@ -1175,6 +1233,8 @@ from ._models import SybaseTableDataset # type: ignore from ._models import TabularSource # type: ignore from ._models import TabularTranslator # type: ignore + from ._models import TarGZipReadSettings # type: ignore + from ._models import TarReadSettings # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataPartitionSettings # type: ignore from ._models import TeradataSource # type: ignore @@ -1240,10 +1300,12 @@ DatasetCompressionLevel, DayOfWeek, DaysOfWeek, + Db2AuthenticationType, DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, DynamicsServicePrincipalCredentialType, + DynamicsSinkWriteBehavior, EventSubscriptionStatus, FtpAuthenticationType, GlobalParameterType, @@ -1272,7 +1334,7 @@ ManagedIntegrationRuntimeNodeStatus, MongoDBAuthenticationType, NetezzaPartitionOption, - ODataAadServicePrincipalCredentialType, + ODataAADServicePrincipalCredentialType, ODataAuthenticationType, OraclePartitionOption, OrcCompressionCodec, @@ -1280,12 +1342,14 @@ PhoenixAuthenticationType, PolybaseSettingsRejectType, PrestoAuthenticationType, + PublicNetworkAccess, RecurrenceFrequency, RestServiceAuthenticationType, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrder, RunQueryOrderByField, + SQLPartitionOption, SalesforceSinkWriteBehavior, SalesforceSourceReadBehavior, SapCloudForCustomerSinkWriteBehavior, @@ -1298,7 +1362,7 @@ SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, - SqlPartitionOption, + SsisLogLocationType, SsisObjectMetadataType, SsisPackageLocationType, StoredProcedureParameterType, @@ -1311,6 +1375,7 @@ VariableType, WebActivityMethod, WebAuthenticationType, + WebHookActivityMethod, ) __all__ = [ @@ -1338,6 +1403,7 @@ 'AvroSink', 'AvroSource', 'AvroWriteSettings', + 'AzPowerShellSetup', 'AzureBatchLinkedService', 'AzureBlobDataset', 'AzureBlobFsDataset', @@ -1364,10 +1430,17 @@ 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 
'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureFileStorageLinkedService', 'AzureFileStorageLocation', 'AzureFileStorageReadSettings', + 'AzureFileStorageWriteSettings', 'AzureFunctionActivity', 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', @@ -1381,26 +1454,26 @@ 'AzureMlServiceLinkedService', 'AzureMlUpdateResourceActivity', 'AzureMlWebServiceFile', - 'AzureMySqlLinkedService', - 'AzureMySqlSink', - 'AzureMySqlSource', - 'AzureMySqlTableDataset', - 'AzurePostgreSqlLinkedService', - 'AzurePostgreSqlSink', - 'AzurePostgreSqlSource', - 'AzurePostgreSqlTableDataset', + 'AzureMySQLLinkedService', + 'AzureMySQLSink', + 'AzureMySQLSource', + 'AzureMySQLTableDataset', + 'AzurePostgreSQLLinkedService', + 'AzurePostgreSQLSink', + 'AzurePostgreSQLSource', + 'AzurePostgreSQLTableDataset', 'AzureQueueSink', + 'AzureSQLDWLinkedService', + 'AzureSQLDWTableDataset', + 'AzureSQLDatabaseLinkedService', + 'AzureSQLMiLinkedService', + 'AzureSQLMiTableDataset', + 'AzureSQLSink', + 'AzureSQLSource', + 'AzureSQLTableDataset', 'AzureSearchIndexDataset', 'AzureSearchIndexSink', 'AzureSearchLinkedService', - 'AzureSqlDWLinkedService', - 'AzureSqlDWTableDataset', - 'AzureSqlDatabaseLinkedService', - 'AzureSqlMiLinkedService', - 'AzureSqlMiTableDataset', - 'AzureSqlSink', - 'AzureSqlSource', - 'AzureSqlTableDataset', 'AzureStorageLinkedService', 'AzureTableDataset', 'AzureTableSink', @@ -1429,8 +1502,10 @@ 'ConcurLinkedService', 'ConcurObjectDataset', 'ConcurSource', + 'ConnectionStateProperties', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', 'CopyTranslator', @@ -1439,9 +1514,9 @@ 'CosmosDBMongoDBApiLinkedService', 'CosmosDBMongoDBApiSink', 'CosmosDBMongoDBApiSource', - 'CosmosDBSqlApiCollectionDataset', - 'CosmosDBSqlApiSink', - 'CosmosDBSqlApiSource', + 'CosmosDBSQLApiCollectionDataset', + 'CosmosDBSQLApiSink', + 'CosmosDBSQLApiSource', 'CouchbaseLinkedService', 'CouchbaseSource', 'CouchbaseTableDataset', @@ -1489,6 +1564,8 @@ 'DatasetResource', 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1532,6 +1609,8 @@ 'ExecuteSsisPackageActivity', 'ExecutionActivity', 'ExportSettings', + 'ExposureControlBatchRequest', + 'ExposureControlBatchResponse', 'ExposureControlRequest', 'ExposureControlResponse', 'Expression', @@ -1649,6 +1728,8 @@ 'LinkedServiceListResponse', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1659,6 +1740,12 @@ 'ManagedIntegrationRuntimeNode', 'ManagedIntegrationRuntimeOperationResult', 'ManagedIntegrationRuntimeStatus', + 'ManagedPrivateEndpoint', + 'ManagedPrivateEndpointListResponse', + 'ManagedPrivateEndpointResource', + 'ManagedVirtualNetwork', + 'ManagedVirtualNetworkListResponse', + 'ManagedVirtualNetworkResource', 'MappingDataFlow', 'MariaDBLinkedService', 'MariaDBSource', @@ -1670,6 +1757,9 @@ 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', + 'MongoDBAtlasCollectionDataset', + 'MongoDBAtlasLinkedService', + 'MongoDBAtlasSource', 'MongoDBCollectionDataset', 'MongoDBCursorMethodsProperties', 'MongoDBLinkedService', @@ -1678,9 +1768,9 @@ 'MongoDBV2LinkedService', 'MongoDBV2Source', 
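Editorial note: besides adding new models, the `models/__init__.py` hunks rename a large family of classes (`AzureSql*` to `AzureSQL*`, `MySql*` to `MySQL*`, `PostgreSql*` to `PostgreSQL*`, `CosmosDBSqlApi*` to `CosmosDBSQLApi*`, `Sql*` to `SQL*`), so any caller importing the old names must be updated. Illustrative only; the import path assumes the vendored package is importable under its generated `data_factory_management_client` namespace.

```python
# Old spellings such as AzureSqlSink or MySqlSource are no longer exported after
# this regeneration; only the SQL-cased names remain in __all__.
from data_factory_management_client.models import (
    AzureSQLSink,            # was AzureSqlSink
    AzureMySQLSource,        # was AzureMySqlSource
    PostgreSQLTableDataset,  # was PostgreSqlTableDataset
    CosmosDBSQLApiSource,    # was CosmosDBSqlApiSource
)
```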
'MultiplePipelineTrigger', - 'MySqlLinkedService', - 'MySqlSource', - 'MySqlTableDataset', + 'MySQLLinkedService', + 'MySQLSource', + 'MySQLTableDataset', 'NetezzaLinkedService', 'NetezzaPartitionSettings', 'NetezzaSource', @@ -1715,12 +1805,14 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1734,9 +1826,9 @@ 'PipelineRunInvokedBy', 'PipelineRunsQueryResponse', 'PolybaseSettings', - 'PostgreSqlLinkedService', - 'PostgreSqlSource', - 'PostgreSqlTableDataset', + 'PostgreSQLLinkedService', + 'PostgreSQLSource', + 'PostgreSQLTableDataset', 'PrestoLinkedService', 'PrestoObjectDataset', 'PrestoSource', @@ -1757,11 +1849,24 @@ 'ResponsysSource', 'RestResourceDataset', 'RestServiceLinkedService', + 'RestSink', 'RestSource', 'RetryPolicy', 'RunFilterParameters', 'RunQueryFilter', 'RunQueryOrderBy', + 'SQLDWSink', + 'SQLDWSource', + 'SQLMiSink', + 'SQLMiSource', + 'SQLPartitionSettings', + 'SQLServerLinkedService', + 'SQLServerSink', + 'SQLServerSource', + 'SQLServerStoredProcedureActivity', + 'SQLServerTableDataset', + 'SQLSink', + 'SQLSource', 'SalesforceLinkedService', 'SalesforceMarketingCloudLinkedService', 'SalesforceMarketingCloudObjectDataset', @@ -1827,18 +1932,6 @@ 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', - 'SqlDWSink', - 'SqlDWSource', - 'SqlMiSink', - 'SqlMiSource', - 'SqlPartitionSettings', - 'SqlServerLinkedService', - 'SqlServerSink', - 'SqlServerSource', - 'SqlServerStoredProcedureActivity', - 'SqlServerTableDataset', - 'SqlSink', - 'SqlSource', 'SquareLinkedService', 'SquareObjectDataset', 'SquareSource', @@ -1872,6 +1965,8 @@ 'SybaseTableDataset', 'TabularSource', 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -1935,10 +2030,12 @@ 'DatasetCompressionLevel', 'DayOfWeek', 'DaysOfWeek', + 'Db2AuthenticationType', 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', 'DynamicsServicePrincipalCredentialType', + 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'FtpAuthenticationType', 'GlobalParameterType', @@ -1967,7 +2064,7 @@ 'ManagedIntegrationRuntimeNodeStatus', 'MongoDBAuthenticationType', 'NetezzaPartitionOption', - 'ODataAadServicePrincipalCredentialType', + 'ODataAADServicePrincipalCredentialType', 'ODataAuthenticationType', 'OraclePartitionOption', 'OrcCompressionCodec', @@ -1975,12 +2072,14 @@ 'PhoenixAuthenticationType', 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PublicNetworkAccess', 'RecurrenceFrequency', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrder', 'RunQueryOrderByField', + 'SQLPartitionOption', 'SalesforceSinkWriteBehavior', 'SalesforceSourceReadBehavior', 'SapCloudForCustomerSinkWriteBehavior', @@ -1993,7 +2092,7 @@ 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', - 'SqlPartitionOption', + 'SsisLogLocationType', 'SsisObjectMetadataType', 'SsisPackageLocationType', 'StoredProcedureParameterType', @@ -2006,4 +2105,5 @@ 'VariableType', 'WebActivityMethod', 'WebAuthenticationType', + 'WebHookActivityMethod', ] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 6ad8a7e51ce..dbaf03c8e17 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -6,731 +6,784 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum - -class AvroCompressionCodec(str, Enum): - - none = "none" - deflate = "deflate" - snappy = "snappy" - xz = "xz" - bzip2 = "bzip2" - -class AzureFunctionActivityMethod(str, Enum): +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "none" + DEFLATE = "deflate" + SNAPPY = "snappy" + XZ = "xz" + BZIP2 = "bzip2" + +class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a AzureFunctionActivity. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" - options = "OPTIONS" - head = "HEAD" - trace = "TRACE" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + OPTIONS = "OPTIONS" + HEAD = "HEAD" + TRACE = "TRACE" -class AzureSearchIndexWriteBehaviorType(str, Enum): +class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the write behavior when upserting documents into Azure Search Index. """ - merge = "Merge" - upload = "Upload" + MERGE = "Merge" + UPLOAD = "Upload" -class BlobEventTypes(str, Enum): +class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" - microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" + MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" -class CassandraSourceReadConsistencyLevels(str, Enum): +class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. 
""" - all = "ALL" - each_quorum = "EACH_QUORUM" - quorum = "QUORUM" - local_quorum = "LOCAL_QUORUM" - one = "ONE" - two = "TWO" - three = "THREE" - local_one = "LOCAL_ONE" - serial = "SERIAL" - local_serial = "LOCAL_SERIAL" - -class CompressionCodec(str, Enum): - - none = "none" - gzip = "gzip" - snappy = "snappy" - lzo = "lzo" - bzip2 = "bzip2" - deflate = "deflate" - zip_deflate = "zipDeflate" - lz4 = "lz4" - -class CopyBehaviorType(str, Enum): + ALL = "ALL" + EACH_QUORUM = "EACH_QUORUM" + QUORUM = "QUORUM" + LOCAL_QUORUM = "LOCAL_QUORUM" + ONE = "ONE" + TWO = "TWO" + THREE = "THREE" + LOCAL_ONE = "LOCAL_ONE" + SERIAL = "SERIAL" + LOCAL_SERIAL = "LOCAL_SERIAL" + +class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "none" + GZIP = "gzip" + SNAPPY = "snappy" + LZO = "lzo" + BZIP2 = "bzip2" + DEFLATE = "deflate" + ZIP_DEFLATE = "zipDeflate" + LZ4 = "lz4" + TAR = "tar" + TAR_G_ZIP = "tarGZip" + +class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. """ - preserve_hierarchy = "PreserveHierarchy" - flatten_hierarchy = "FlattenHierarchy" - merge_files = "MergeFiles" + PRESERVE_HIERARCHY = "PreserveHierarchy" + FLATTEN_HIERARCHY = "FlattenHierarchy" + MERGE_FILES = "MergeFiles" -class DataFlowComputeType(str, Enum): +class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ - general = "General" - memory_optimized = "MemoryOptimized" - compute_optimized = "ComputeOptimized" + GENERAL = "General" + MEMORY_OPTIMIZED = "MemoryOptimized" + COMPUTE_OPTIMIZED = "ComputeOptimized" -class DataFlowDebugCommandType(str, Enum): +class DataFlowDebugCommandType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The command type. """ - execute_preview_query = "executePreviewQuery" - execute_statistics_query = "executeStatisticsQuery" - execute_expression_query = "executeExpressionQuery" + EXECUTE_PREVIEW_QUERY = "executePreviewQuery" + EXECUTE_STATISTICS_QUERY = "executeStatisticsQuery" + EXECUTE_EXPRESSION_QUERY = "executeExpressionQuery" -class DatasetCompressionLevel(str, Enum): +class DatasetCompressionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available compression levels. """ - optimal = "Optimal" - fastest = "Fastest" + OPTIMAL = "Optimal" + FASTEST = "Fastest" -class DayOfWeek(str, Enum): +class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The days of the week. """ - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" + SUNDAY = "Sunday" + MONDAY = "Monday" + TUESDAY = "Tuesday" + WEDNESDAY = "Wednesday" + THURSDAY = "Thursday" + FRIDAY = "Friday" + SATURDAY = "Saturday" -class DaysOfWeek(str, Enum): +class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" + SUNDAY = "Sunday" + MONDAY = "Monday" + TUESDAY = "Tuesday" + WEDNESDAY = "Wednesday" + THURSDAY = "Thursday" + FRIDAY = "Friday" + SATURDAY = "Saturday" + +class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """AuthenticationType to be used for connection. It is mutually exclusive with connectionString + property. 
+ """ -class DependencyCondition(str, Enum): + BASIC = "Basic" - succeeded = "Succeeded" - failed = "Failed" - skipped = "Skipped" - completed = "Completed" +class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): -class DynamicsAuthenticationType(str, Enum): + SUCCEEDED = "Succeeded" + FAILED = "Failed" + SKIPPED = "Skipped" + COMPLETED = "Completed" + +class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). """ - office365 = "Office365" - ifd = "Ifd" - aad_service_principal = "AADServicePrincipal" + OFFICE365 = "Office365" + IFD = "Ifd" + AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" -class DynamicsDeploymentType(str, Enum): +class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). """ - online = "Online" - on_premises_with_ifd = "OnPremisesWithIfd" + ONLINE = "Online" + ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(str, Enum): +class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). """ - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + +class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Defines values for DynamicsSinkWriteBehavior. + """ + + UPSERT = "Upsert" -class EventSubscriptionStatus(str, Enum): +class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Event Subscription Status. """ - enabled = "Enabled" - provisioning = "Provisioning" - deprovisioning = "Deprovisioning" - disabled = "Disabled" - unknown = "Unknown" + ENABLED = "Enabled" + PROVISIONING = "Provisioning" + DEPROVISIONING = "Deprovisioning" + DISABLED = "Disabled" + UNKNOWN = "Unknown" -class FtpAuthenticationType(str, Enum): +class FtpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" -class GlobalParameterType(str, Enum): +class GlobalParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Global Parameter type. """ - object = "Object" - string = "String" - int = "Int" - float = "Float" - bool = "Bool" - array = "Array" + OBJECT = "Object" + STRING = "String" + INT = "Int" + FLOAT = "Float" + BOOL = "Bool" + ARRAY = "Array" -class GoogleAdWordsAuthenticationType(str, Enum): +class GoogleAdWordsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
""" - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class GoogleBigQueryAuthenticationType(str, Enum): +class GoogleBigQueryAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. """ - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class HBaseAuthenticationType(str, Enum): +class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism to use to connect to the HBase server. """ - anonymous = "Anonymous" - basic = "Basic" + ANONYMOUS = "Anonymous" + BASIC = "Basic" -class HdiNodeTypes(str, Enum): +class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The node types on which the script action should be executed. """ - headnode = "Headnode" - workernode = "Workernode" - zookeeper = "Zookeeper" + HEADNODE = "Headnode" + WORKERNODE = "Workernode" + ZOOKEEPER = "Zookeeper" -class HdInsightActivityDebugInfoOption(str, Enum): +class HdInsightActivityDebugInfoOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The HDInsightActivityDebugInfoOption settings to use. """ - none = "None" - always = "Always" - failure = "Failure" + NONE = "None" + ALWAYS = "Always" + FAILURE = "Failure" -class HiveAuthenticationType(str, Enum): +class HiveAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Hive server. """ - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class HiveServerType(str, Enum): +class HiveServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Hive server. """ - hive_server1 = "HiveServer1" - hive_server2 = "HiveServer2" - hive_thrift_server = "HiveThriftServer" + HIVE_SERVER1 = "HiveServer1" + HIVE_SERVER2 = "HiveServer2" + HIVE_THRIFT_SERVER = "HiveThriftServer" -class HiveThriftTransportProtocol(str, Enum): +class HiveThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class HttpAuthenticationType(str, Enum): +class HttpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the HTTP server. """ - basic = "Basic" - anonymous = "Anonymous" - digest = "Digest" - windows = "Windows" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + DIGEST = "Digest" + WINDOWS = "Windows" + CLIENT_CERTIFICATE = "ClientCertificate" -class ImpalaAuthenticationType(str, Enum): +class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. 
""" - anonymous = "Anonymous" - sasl_username = "SASLUsername" - username_and_password = "UsernameAndPassword" + ANONYMOUS = "Anonymous" + SASL_USERNAME = "SASLUsername" + USERNAME_AND_PASSWORD = "UsernameAndPassword" -class IntegrationRuntimeAuthKeyName(str, Enum): +class IntegrationRuntimeAuthKeyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The name of the authentication key to regenerate. """ - auth_key1 = "authKey1" - auth_key2 = "authKey2" + AUTH_KEY1 = "authKey1" + AUTH_KEY2 = "authKey2" -class IntegrationRuntimeAutoUpdate(str, Enum): +class IntegrationRuntimeAutoUpdate(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The state of integration runtime auto update. """ - on = "On" - off = "Off" + ON = "On" + OFF = "Off" -class IntegrationRuntimeEdition(str, Enum): +class IntegrationRuntimeEdition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The edition for the SSIS Integration Runtime """ - standard = "Standard" - enterprise = "Enterprise" + STANDARD = "Standard" + ENTERPRISE = "Enterprise" -class IntegrationRuntimeEntityReferenceType(str, Enum): +class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of this referenced entity. """ - integration_runtime_reference = "IntegrationRuntimeReference" - linked_service_reference = "LinkedServiceReference" + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" -class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum): +class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist). """ - not_set = "NotSet" - ssl_encrypted = "SslEncrypted" - not_encrypted = "NotEncrypted" + NOT_SET = "NotSet" + SSL_ENCRYPTED = "SslEncrypted" + NOT_ENCRYPTED = "NotEncrypted" -class IntegrationRuntimeLicenseType(str, Enum): +class IntegrationRuntimeLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """License type for bringing your own license scenario. """ - base_price = "BasePrice" - license_included = "LicenseIncluded" + BASE_PRICE = "BasePrice" + LICENSE_INCLUDED = "LicenseIncluded" -class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): +class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ """ - basic = "Basic" - standard = "Standard" - premium = "Premium" - premium_rs = "PremiumRS" + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + PREMIUM_RS = "PremiumRS" -class IntegrationRuntimeState(str, Enum): +class IntegrationRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The state of integration runtime. 
""" - initial = "Initial" - stopped = "Stopped" - started = "Started" - starting = "Starting" - stopping = "Stopping" - need_registration = "NeedRegistration" - online = "Online" - limited = "Limited" - offline = "Offline" - access_denied = "AccessDenied" + INITIAL = "Initial" + STOPPED = "Stopped" + STARTED = "Started" + STARTING = "Starting" + STOPPING = "Stopping" + NEED_REGISTRATION = "NeedRegistration" + ONLINE = "Online" + LIMITED = "Limited" + OFFLINE = "Offline" + ACCESS_DENIED = "AccessDenied" -class IntegrationRuntimeType(str, Enum): +class IntegrationRuntimeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of integration runtime. """ - managed = "Managed" - self_hosted = "SelfHosted" + MANAGED = "Managed" + SELF_HOSTED = "SelfHosted" -class IntegrationRuntimeUpdateResult(str, Enum): +class IntegrationRuntimeUpdateResult(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The result of the last integration runtime node update. """ - none = "None" - succeed = "Succeed" - fail = "Fail" + NONE = "None" + SUCCEED = "Succeed" + FAIL = "Fail" -class JsonFormatFilePattern(str, Enum): +class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """JSON format file pattern. A property of JsonFormat. """ - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" -class JsonWriteFilePattern(str, Enum): +class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. """ - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" -class ManagedIntegrationRuntimeNodeStatus(str, Enum): +class ManagedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The managed integration runtime node status. """ - starting = "Starting" - available = "Available" - recycling = "Recycling" - unavailable = "Unavailable" + STARTING = "Starting" + AVAILABLE = "Available" + RECYCLING = "Recycling" + UNAVAILABLE = "Unavailable" -class MongoDBAuthenticationType(str, Enum): +class MongoDBAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" -class NetezzaPartitionOption(str, Enum): +class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Netezza read in parallel. """ - none = "None" - data_slice = "DataSlice" - dynamic_range = "DynamicRange" + NONE = "None" + DATA_SLICE = "DataSlice" + DYNAMIC_RANGE = "DynamicRange" -class ODataAadServicePrincipalCredentialType(str, Enum): +class ODataAADServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. """ - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class ODataAuthenticationType(str, Enum): +class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the OData service. 
""" - basic = "Basic" - anonymous = "Anonymous" - windows = "Windows" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + WINDOWS = "Windows" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class OraclePartitionOption(str, Enum): +class OraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Oracle read in parallel. """ - none = "None" - physical_partitions_of_table = "PhysicalPartitionsOfTable" - dynamic_range = "DynamicRange" + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" -class OrcCompressionCodec(str, Enum): +class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - none = "none" - zlib = "zlib" - snappy = "snappy" + NONE = "none" + ZLIB = "zlib" + SNAPPY = "snappy" + LZO = "lzo" -class ParameterType(str, Enum): +class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. """ - object = "Object" - string = "String" - int = "Int" - float = "Float" - bool = "Bool" - array = "Array" - secure_string = "SecureString" + OBJECT = "Object" + STRING = "String" + INT = "Int" + FLOAT = "Float" + BOOL = "Bool" + ARRAY = "Array" + SECURE_STRING = "SecureString" -class PhoenixAuthenticationType(str, Enum): +class PhoenixAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Phoenix server. """ - anonymous = "Anonymous" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class PolybaseSettingsRejectType(str, Enum): +class PolybaseSettingsRejectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Indicates whether the RejectValue property is specified as a literal value or a percentage. """ - value = "value" - percentage = "percentage" + VALUE = "value" + PERCENTAGE = "percentage" -class PrestoAuthenticationType(str, Enum): +class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Presto server. """ - anonymous = "Anonymous" - ldap = "LDAP" + ANONYMOUS = "Anonymous" + LDAP = "LDAP" -class RecurrenceFrequency(str, Enum): +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Whether or not public network access is allowed for the data factory. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the schedule trigger. """ - not_specified = "NotSpecified" - minute = "Minute" - hour = "Hour" - day = "Day" - week = "Week" - month = "Month" - year = "Year" + NOT_SPECIFIED = "NotSpecified" + MINUTE = "Minute" + HOUR = "Hour" + DAY = "Day" + WEEK = "Week" + MONTH = "Month" + YEAR = "Year" -class RestServiceAuthenticationType(str, Enum): +class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the REST service. 
""" - anonymous = "Anonymous" - basic = "Basic" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + ANONYMOUS = "Anonymous" + BASIC = "Basic" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class RunQueryFilterOperand(str, Enum): +class RunQueryFilterOperand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. """ - pipeline_name = "PipelineName" - status = "Status" - run_start = "RunStart" - run_end = "RunEnd" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - activity_type = "ActivityType" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" - run_group_id = "RunGroupId" - latest_only = "LatestOnly" - -class RunQueryFilterOperator(str, Enum): + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + RUN_START = "RunStart" + RUN_END = "RunEnd" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + ACTIVITY_TYPE = "ActivityType" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" + RUN_GROUP_ID = "RunGroupId" + LATEST_ONLY = "LatestOnly" + +class RunQueryFilterOperator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Operator to be used for filter. """ - equals = "Equals" - not_equals = "NotEquals" - in_enum = "In" - not_in = "NotIn" + EQUALS = "Equals" + NOT_EQUALS = "NotEquals" + IN_ENUM = "In" + NOT_IN = "NotIn" -class RunQueryOrder(str, Enum): +class RunQueryOrder(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Sorting order of the parameter. """ - asc = "ASC" - desc = "DESC" + ASC = "ASC" + DESC = "DESC" -class RunQueryOrderByField(str, Enum): +class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. """ - run_start = "RunStart" - run_end = "RunEnd" - pipeline_name = "PipelineName" - status = "Status" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" + RUN_START = "RunStart" + RUN_END = "RunEnd" + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" -class SalesforceSinkWriteBehavior(str, Enum): +class SalesforceSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The write behavior for the operation. Default is Insert. """ - insert = "Insert" - upsert = "Upsert" + INSERT = "Insert" + UPSERT = "Upsert" -class SalesforceSourceReadBehavior(str, Enum): +class SalesforceSourceReadBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The read behavior for the operation. Default is Query. 
""" - query = "Query" - query_all = "QueryAll" + QUERY = "Query" + QUERY_ALL = "QueryAll" -class SapCloudForCustomerSinkWriteBehavior(str, Enum): +class SapCloudForCustomerSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The write behavior for the operation. Default is 'Insert'. """ - insert = "Insert" - update = "Update" + INSERT = "Insert" + UPDATE = "Update" -class SapHanaAuthenticationType(str, Enum): +class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the SAP HANA server. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class SapHanaPartitionOption(str, Enum): +class SapHanaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for SAP HANA read in parallel. """ - none = "None" - physical_partitions_of_table = "PhysicalPartitionsOfTable" - sap_hana_dynamic_range = "SapHanaDynamicRange" + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + SAP_HANA_DYNAMIC_RANGE = "SapHanaDynamicRange" -class SapTablePartitionOption(str, Enum): +class SapTablePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for SAP table read in parallel. """ - none = "None" - partition_on_int = "PartitionOnInt" - partition_on_calendar_year = "PartitionOnCalendarYear" - partition_on_calendar_month = "PartitionOnCalendarMonth" - partition_on_calendar_date = "PartitionOnCalendarDate" - partition_on_time = "PartitionOnTime" + NONE = "None" + PARTITION_ON_INT = "PartitionOnInt" + PARTITION_ON_CALENDAR_YEAR = "PartitionOnCalendarYear" + PARTITION_ON_CALENDAR_MONTH = "PartitionOnCalendarMonth" + PARTITION_ON_CALENDAR_DATE = "PartitionOnCalendarDate" + PARTITION_ON_TIME = "PartitionOnTime" -class SelfHostedIntegrationRuntimeNodeStatus(str, Enum): +class SelfHostedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Status of the integration runtime node. """ - need_registration = "NeedRegistration" - online = "Online" - limited = "Limited" - offline = "Offline" - upgrading = "Upgrading" - initializing = "Initializing" - initialize_failed = "InitializeFailed" + NEED_REGISTRATION = "NeedRegistration" + ONLINE = "Online" + LIMITED = "Limited" + OFFLINE = "Offline" + UPGRADING = "Upgrading" + INITIALIZING = "Initializing" + INITIALIZE_FAILED = "InitializeFailed" -class ServiceNowAuthenticationType(str, Enum): +class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ - basic = "Basic" - o_auth2 = "OAuth2" + BASIC = "Basic" + O_AUTH2 = "OAuth2" -class SftpAuthenticationType(str, Enum): +class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - ssh_public_key = "SshPublicKey" + BASIC = "Basic" + SSH_PUBLIC_KEY = "SshPublicKey" -class SparkAuthenticationType(str, Enum): +class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Spark server. 
""" - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class SparkServerType(str, Enum): +class SparkServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Spark server. """ - shark_server = "SharkServer" - shark_server2 = "SharkServer2" - spark_thrift_server = "SparkThriftServer" + SHARK_SERVER = "SharkServer" + SHARK_SERVER2 = "SharkServer2" + SPARK_THRIFT_SERVER = "SparkThriftServer" -class SparkThriftTransportProtocol(str, Enum): +class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class SqlPartitionOption(str, Enum): +class SQLPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ - none = "None" - physical_partitions_of_table = "PhysicalPartitionsOfTable" - dynamic_range = "DynamicRange" + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" + +class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of SSIS log location. + """ -class SsisObjectMetadataType(str, Enum): + FILE = "File" + +class SsisObjectMetadataType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS object metadata. """ - folder = "Folder" - project = "Project" - package = "Package" - environment = "Environment" + FOLDER = "Folder" + PROJECT = "Project" + PACKAGE = "Package" + ENVIRONMENT = "Environment" -class SsisPackageLocationType(str, Enum): +class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS package location. """ - ssisdb = "SSISDB" - file = "File" - inline_package = "InlinePackage" - package_store = "PackageStore" + SSISDB = "SSISDB" + FILE = "File" + INLINE_PACKAGE = "InlinePackage" + PACKAGE_STORE = "PackageStore" -class StoredProcedureParameterType(str, Enum): +class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Stored procedure parameter type. """ - string = "String" - int = "Int" - int64 = "Int64" - decimal = "Decimal" - guid = "Guid" - boolean = "Boolean" - date = "Date" + STRING = "String" + INT = "Int" + INT64 = "Int64" + DECIMAL = "Decimal" + GUID = "Guid" + BOOLEAN = "Boolean" + DATE = "Date" -class SybaseAuthenticationType(str, Enum): +class SybaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class TeradataAuthenticationType(str, Enum): +class TeradataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class TeradataPartitionOption(str, Enum): +class TeradataPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for teradata read in parallel. 
""" - none = "None" - hash = "Hash" - dynamic_range = "DynamicRange" + NONE = "None" + HASH = "Hash" + DYNAMIC_RANGE = "DynamicRange" -class TriggerRunStatus(str, Enum): +class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Trigger run status. """ - succeeded = "Succeeded" - failed = "Failed" - inprogress = "Inprogress" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + INPROGRESS = "Inprogress" -class TriggerRuntimeState(str, Enum): +class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible state of Triggers. """ - started = "Started" - stopped = "Stopped" - disabled = "Disabled" + STARTED = "Started" + STOPPED = "Stopped" + DISABLED = "Disabled" -class TumblingWindowFrequency(str, Enum): +class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the tumbling window trigger. """ - minute = "Minute" - hour = "Hour" + MINUTE = "Minute" + HOUR = "Hour" -class VariableType(str, Enum): +class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. """ - string = "String" - bool = "Bool" - array = "Array" + STRING = "String" + BOOL = "Bool" + ARRAY = "Array" -class WebActivityMethod(str, Enum): +class WebActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a WebActivity. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" -class WebAuthenticationType(str, Enum): +class WebAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the web table source. """ - basic = "Basic" - anonymous = "Anonymous" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + CLIENT_CERTIFICATE = "ClientCertificate" + +class WebHookActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The list of HTTP methods supported by a WebHook activity. + """ + + POST = "POST" diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index c3f3c651421..7aa8f7877f8 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -84,7 +84,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs['name'] - self.type = 'Activity' + self.type = 'Activity' # type: str self.description = kwargs.get('description', None) self.depends_on = kwargs.get('depends_on', None) self.user_properties = kwargs.get('user_properties', None) @@ -342,7 +342,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBAtlasLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
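The enum hunks above swap plain `str, Enum` declarations for `with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)` and rename members to UPPER_CASE while leaving the REST wire values (`"ServiceAuthentication"`, `"DynamicRange"`, ...) untouched. As a rough illustration only (Python-3 syntax instead of `with_metaclass`, and not the azure-core implementation), a case-insensitive enum metaclass can behave like this:

from enum import Enum, EnumMeta


class CaseInsensitiveEnumMeta(EnumMeta):
    """Sketch: resolve enum members without regard to case."""

    def __getitem__(cls, name):
        # MyEnum["basic"] and MyEnum["BASIC"] both resolve to MyEnum.BASIC.
        return super().__getitem__(name.upper())

    def __call__(cls, value, *args, **kwargs):
        # MyEnum("basic") resolves to the member whose value is "Basic".
        if isinstance(value, str):
            for member in cls:
                if member.value.lower() == value.lower():
                    return member
        return super().__call__(value, *args, **kwargs)


class HttpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    BASIC = "Basic"
    ANONYMOUS = "Anonymous"
    DIGEST = "Digest"


assert HttpAuthenticationType("basic") is HttpAuthenticationType.BASIC
assert HttpAuthenticationType["anonymous"] is HttpAuthenticationType.ANONYMOUS
# The str mixin keeps the serialized value exactly as the service expects it.
assert HttpAuthenticationType.BASIC == "Basic"

Because only the Python-side attribute names change and the member values stay identical, payloads sent to the service are unaffected by this rename; the generated file reaches the same result through `with_metaclass` so it also imports cleanly on the Python 2 code path.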
@@ -375,7 +375,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbAtlas': 'MongoDBAtlasLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -384,7 +384,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = kwargs.get('connect_via', None) self.description = kwargs.get('description', None) self.parameters = kwargs.get('parameters', None) @@ -472,7 +472,7 @@ def __init__( **kwargs ): super(AmazonMwsLinkedService, self).__init__(**kwargs) - self.type = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = kwargs['endpoint'] self.marketplace_id = kwargs['marketplace_id'] self.seller_id = kwargs['seller_id'] @@ -489,7 +489,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBAtlasCollectionDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
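In the `_subtype_map` hunks, only the right-hand Python class names are renamed (for example `AzureSqlDatabaseLinkedService` becomes `AzureSQLDatabaseLinkedService`, and new entries such as `'MongoDbAtlas': 'MongoDBAtlasLinkedService'` are added); the left-hand keys remain the discriminator strings the service sends, so existing factory JSON keeps deserializing to the right class. A minimal sketch of that dispatch idea, using a hypothetical registry rather than msrest's actual machinery:

from typing import Any, Dict, Type


class LinkedService:
    """Sketch of discriminator-based dispatch; msrest's real logic differs."""

    _subtype_map: Dict[str, Type["LinkedService"]] = {}

    def __init__(self, **kwargs: Any) -> None:
        self.properties = kwargs

    @classmethod
    def register(cls, discriminator: str):
        def wrap(subclass: Type["LinkedService"]) -> Type["LinkedService"]:
            cls._subtype_map[discriminator] = subclass
            return subclass
        return wrap

    @classmethod
    def deserialize(cls, payload: Dict[str, Any]) -> "LinkedService":
        # The 'type' value in the JSON picks the Python class; renaming the
        # class itself does not change the 'AzureSqlDatabase' key on the wire.
        subclass = cls._subtype_map.get(payload["type"], cls)
        return subclass(**{k: v for k, v in payload.items() if k != "type"})


@LinkedService.register("AzureSqlDatabase")
class AzureSQLDatabaseLinkedService(LinkedService):
    pass


svc = LinkedService.deserialize({"type": "AzureSqlDatabase", "connectionString": "..."})
assert isinstance(svc, AzureSQLDatabaseLinkedService)

The same reasoning applies to the Dataset, CopySource, and TabularSource maps below: connectors gain or rename Python classes, while the wire discriminators stay backward compatible.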
@@ -535,7 +535,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDBAtlasCollectionDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -544,7 +544,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Dataset' + self.type = 'Dataset' # type: str self.description = kwargs.get('description', None) self.structure = kwargs.get('structure', None) self.schema = kwargs.get('schema', None) @@ -608,7 +608,7 @@ def __init__( **kwargs ): super(AmazonMwsObjectDataset, self).__init__(**kwargs) - self.type = 'AmazonMWSObject' + self.type = 'AmazonMWSObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -616,7 +616,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSqlApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBAtlasSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
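Each `self.type = '...'` discriminator assignment in these hunks gains a `# type: str` comment. In this kwargs-based `_models.py` variant, which (like the `with_metaclass` usage above) avoids Python-3-only syntax, a comment annotation is the PEP 484 spelling of a variable annotation: it declares the discriminator as a plain `str` that every subclass then overwrites with its own literal. An illustrative comparison with hypothetical classes, not SDK code:

class Py2StyleModel:
    def __init__(self, **kwargs):
        # PEP 484 comment annotation: discriminator declared as plain str.
        self.type = 'Model'  # type: str


class Py3StyleModel:
    def __init__(self, **kwargs) -> None:
        # Equivalent declaration using Python-3-only annotation syntax.
        self.type: str = 'Model'


class ConcreteModel(Py2StyleModel):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Subclasses overwrite the discriminator with their own literal value.
        self.type = 'Concrete'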
@@ -649,7 +649,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDBAtlasSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -658,7 +658,7 @@ def __init__( ): super(CopySource, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySource' + self.type = 'CopySource' # type: str self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) @@ -668,7 +668,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
@@ -709,7 +709,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -717,7 +717,7 @@ def __init__( **kwargs ): super(TabularSource, self).__init__(**kwargs) - self.type = 'TabularSource' + self.type = 'TabularSource' # type: str self.query_timeout = kwargs.get('query_timeout', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -772,7 +772,7 @@ def __init__( **kwargs ): super(AmazonMwsSource, self).__init__(**kwargs) - self.type = 'AmazonMWSSource' + self.type = 'AmazonMWSSource' # type: str self.query = kwargs.get('query', None) @@ -840,7 +840,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.type = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = kwargs['server'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -903,7 +903,7 @@ def __init__( **kwargs ): super(AmazonRedshiftSource, self).__init__(**kwargs) - self.type = 'AmazonRedshiftSource' + self.type = 'AmazonRedshiftSource' # type: str self.query = kwargs.get('query', None) self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) @@ -971,7 +971,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.type = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -1059,7 +1059,7 @@ def __init__( **kwargs ): super(AmazonS3Dataset, self).__init__(**kwargs) - self.type = 'AmazonS3Object' + self.type = 'AmazonS3Object' # type: str self.bucket_name = kwargs['bucket_name'] self.key = kwargs.get('key', None) self.prefix = kwargs.get('prefix', None) @@ -1088,6 +1088,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1098,6 +1101,9 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. Type: string + (or Expression with resultType string). 
+ :type session_token: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1115,9 +1121,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1126,10 +1134,12 @@ def __init__( **kwargs ): super(AmazonS3LinkedService, self).__init__(**kwargs) - self.type = 'AmazonS3' + self.type = 'AmazonS3' # type: str + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1175,7 +1185,7 @@ def __init__( ): super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' + self.type = 'DatasetLocation' # type: str self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) @@ -1222,7 +1232,7 @@ def __init__( **kwargs ): super(AmazonS3Location, self).__init__(**kwargs) - self.type = 'AmazonS3Location' + self.type = 'AmazonS3Location' # type: str self.bucket_name = kwargs.get('bucket_name', None) self.version = kwargs.get('version', None) @@ -1265,7 +1275,7 @@ def __init__( ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'StoreReadSettings' + self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) @@ -1339,7 +1349,7 @@ def __init__( **kwargs ): super(AmazonS3ReadSettings, self).__init__(**kwargs) - self.type = 'AmazonS3ReadSettings' + self.type = 'AmazonS3ReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -1397,7 +1407,7 @@ def __init__( **kwargs ): super(AppendVariableActivity, self).__init__(**kwargs) - self.type = 'AppendVariable' + self.type = 'AppendVariable' # type: str self.variable_name = kwargs.get('variable_name', None) self.value = kwargs.get('value', None) @@ -1465,7 +1475,7 @@ def __init__( **kwargs ): super(AvroDataset, self).__init__(**kwargs) - self.type = 'Avro' + self.type = 'Avro' # type: str self.location = kwargs.get('location', None) self.avro_compression_codec = kwargs.get('avro_compression_codec', None) self.avro_compression_level = kwargs.get('avro_compression_level', None) @@ -1511,7 +1521,7 @@ def __init__( ): super(DatasetStorageFormat, self).__init__(**kwargs) self.additional_properties = 
kwargs.get('additional_properties', None) - self.type = 'DatasetStorageFormat' + self.type = 'DatasetStorageFormat' # type: str self.serializer = kwargs.get('serializer', None) self.deserializer = kwargs.get('deserializer', None) @@ -1548,14 +1558,14 @@ def __init__( **kwargs ): super(AvroFormat, self).__init__(**kwargs) - self.type = 'AvroFormat' + self.type = 'AvroFormat' # type: str class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSqlApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. All required parameters must be populated in order to send to Azure. 
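The AmazonS3LinkedService hunks above introduce `authentication_type` and `session_token`, which enable temporary security credentials for S3. A minimal sketch of how the new keywords would be used, assuming the generated models import from the `data_factory_management_client.models` namespace the docstrings reference and that `SecureString` is the usual `SecretBase` implementation:

```python
from data_factory_management_client.models import AmazonS3LinkedService, SecureString  # assumed import path

# Temporary-credential variant of the S3 linked service; 'AccessKey' remains the
# default when authentication_type is omitted.
s3_linked_service = AmazonS3LinkedService(
    authentication_type='TemporarySecurityCredentials',
    access_key_id='AKIAIOSFODNN7EXAMPLE',                          # placeholder value
    secret_access_key=SecureString(value='<secret-access-key>'),
    session_token='<sts-session-token>',                           # new property; typed as object, so a plain string is accepted
)
```

Per the attribute map, both new values serialize under `typeProperties` as `authenticationType` and `sessionToken`.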
@@ -1596,7 +1606,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} } def __init__( @@ -1605,7 +1615,7 @@ def __init__( ): super(CopySink, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySink' + self.type = 'CopySink' # type: str self.write_batch_size = kwargs.get('write_batch_size', None) self.write_batch_timeout = kwargs.get('write_batch_timeout', None) self.sink_retry_count = kwargs.get('sink_retry_count', None) @@ -1665,7 +1675,7 @@ def __init__( **kwargs ): super(AvroSink, self).__init__(**kwargs) - self.type = 'AvroSink' + self.type = 'AvroSink' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) @@ -1715,7 +1725,7 @@ def __init__( **kwargs ): 
super(AvroSource, self).__init__(**kwargs) - self.type = 'AvroSource' + self.type = 'AvroSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -1724,7 +1734,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1745,7 +1755,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1754,7 +1764,7 @@ def __init__( ): super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'FormatWriteSettings' + self.type = 'FormatWriteSettings' # type: str class AvroWriteSettings(FormatWriteSettings): @@ -1771,6 +1781,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -1782,6 +1799,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1789,9 +1808,73 @@ def __init__( **kwargs ): super(AvroWriteSettings, self).__init__(**kwargs) - self.type = 'AvroWriteSettings' + self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzPowerShellSetup, CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzPowerShellSetup': 'AzPowerShellSetup', 'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzPowerShellSetup(CustomSetupBase): + """The express custom setup of installing Azure PowerShell. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param version: Required. The required version of Azure PowerShell to install. + :type version: str + """ + + _validation = { + 'type': {'required': True}, + 'version': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzPowerShellSetup, self).__init__(**kwargs) + self.type = 'AzPowerShellSetup' # type: str + self.version = kwargs['version'] class AzureBatchLinkedService(LinkedService): @@ -1859,7 +1942,7 @@ def __init__( **kwargs ): super(AzureBatchLinkedService, self).__init__(**kwargs) - self.type = 'AzureBatch' + self.type = 'AzureBatch' # type: str self.account_name = kwargs['account_name'] self.access_key = kwargs.get('access_key', None) self.batch_uri = kwargs['batch_uri'] @@ -1945,7 +2028,7 @@ def __init__( **kwargs ): super(AzureBlobDataset, self).__init__(**kwargs) - self.type = 'AzureBlob' + self.type = 'AzureBlob' # type: str self.folder_path = kwargs.get('folder_path', None) self.table_root_location = kwargs.get('table_root_location', None) self.file_name = kwargs.get('file_name', None) @@ -2020,7 +2103,7 @@ def __init__( **kwargs ): super(AzureBlobFsDataset, self).__init__(**kwargs) - self.type = 'AzureBlobFSFile' + self.type = 'AzureBlobFSFile' # type: str self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) self.format = kwargs.get('format', None) @@ -2060,6 +2143,10 @@ class AzureBlobFsLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
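The new `CustomSetupBase` hierarchy above adds express custom setup types, with `AzPowerShellSetup` as the first concrete subclass shown. A short sketch under the same import assumption; such an object would typically be attached to an Azure-SSIS integration runtime's express custom setup list:

```python
from data_factory_management_client.models import AzPowerShellSetup  # assumed import path

# 'version' is required; the constructor pins type to 'AzPowerShellSetup', which the
# _subtype_map on CustomSetupBase uses for polymorphic (de)serialization.
az_ps_setup = AzPowerShellSetup(version='7.0.0')

# Per the attribute map this serializes as:
# {'type': 'AzPowerShellSetup', 'typeProperties': {'version': '7.0.0'}}
```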
@@ -2083,6 +2170,7 @@ class AzureBlobFsLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2091,12 +2179,13 @@ def __init__( **kwargs ): super(AzureBlobFsLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobFS' + self.type = 'AzureBlobFS' # type: str self.url = kwargs['url'] self.account_key = kwargs.get('account_key', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2138,7 +2227,7 @@ def __init__( **kwargs ): super(AzureBlobFsLocation, self).__init__(**kwargs) - self.type = 'AzureBlobFSLocation' + self.type = 'AzureBlobFSLocation' # type: str self.file_system = kwargs.get('file_system', None) @@ -2208,7 +2297,7 @@ def __init__( **kwargs ): super(AzureBlobFsReadSettings, self).__init__(**kwargs) - self.type = 'AzureBlobFSReadSettings' + self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -2269,7 +2358,7 @@ def __init__( **kwargs ): super(AzureBlobFsSink, self).__init__(**kwargs) - self.type = 'AzureBlobFSSink' + self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2323,7 +2412,7 @@ def __init__( **kwargs ): super(AzureBlobFsSource, self).__init__(**kwargs) - self.type = 'AzureBlobFSSource' + self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) self.skip_header_line_count = kwargs.get('skip_header_line_count', None) self.recursive = kwargs.get('recursive', None) @@ -2333,7 +2422,7 @@ class StoreWriteSettings(msrest.serialization.Model): """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFsWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. + sub-classes are: AzureBlobFsWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. 
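With `azure_cloud_type` now wired into `AzureBlobFsLinkedService` (docstring, attribute map, and `__init__`), a service principal can authenticate against a sovereign cloud. A hedged sketch under the same import assumptions:

```python
from data_factory_management_client.models import AzureBlobFsLinkedService, SecureString  # assumed import path

adls_gen2 = AzureBlobFsLinkedService(
    url='https://exampleaccount.dfs.core.windows.net',              # required
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-secret>'),
    tenant='<tenant-id>',
    azure_cloud_type='AzureUsGovernment',                           # new; AzurePublic / AzureChina / AzureUsGovernment / AzureGermany
)
```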
@@ -2361,7 +2450,7 @@ class StoreWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFsWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFsWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( @@ -2370,7 +2459,7 @@ def __init__( ): super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'StoreWriteSettings' + self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2412,7 +2501,7 @@ def __init__( **kwargs ): super(AzureBlobFsWriteSettings, self).__init__(**kwargs) - self.type = 'AzureBlobFSWriteSettings' + self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = kwargs.get('block_size_in_mb', None) @@ -2457,6 +2546,10 @@ class AzureBlobStorageLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
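`AzureBlobStorageLinkedService` picks up the same `azure_cloud_type` property; its attribute map places the value under `typeProperties.azureCloudType` in the linked service definition. A rough sketch of the resulting properties body (connection details omitted, all values placeholders):

```python
# Shape implied by the attribute map shown in the next hunks; not an exhaustive payload.
blob_linked_service_properties = {
    "type": "AzureBlobStorage",
    "typeProperties": {
        "servicePrincipalId": "<application-id>",
        "tenant": "<tenant-id>",
        "azureCloudType": "AzureChina",   # new key added by this change
    },
}
```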
@@ -2482,6 +2575,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2490,7 +2584,7 @@ def __init__( **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobStorage' + self.type = 'AzureBlobStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = kwargs.get('sas_uri', None) @@ -2499,6 +2593,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2540,7 +2635,7 @@ def __init__( **kwargs ): super(AzureBlobStorageLocation, self).__init__(**kwargs) - self.type = 'AzureBlobStorageLocation' + self.type = 'AzureBlobStorageLocation' # type: str self.container = kwargs.get('container', None) @@ -2614,7 +2709,7 @@ def __init__( **kwargs ): super(AzureBlobStorageReadSettings, self).__init__(**kwargs) - self.type = 'AzureBlobStorageReadSettings' + self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -2664,252 +2759,227 @@ def __init__( **kwargs ): super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) - self.type = 'AzureBlobStorageWriteSettings' + self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). 
- :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type database: object """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' - self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - 
self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) -class ExecutionActivity(Activity): - """Base class for all execution activities. +class ExportSettings(msrest.serialization.Model): + """Export command settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} } def __init__( self, **kwargs ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ExportSettings' # type: str -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. All required parameters must be populated in order to send to Azure. @@ -2926,31 +2996,25 @@ class AzureDataExplorerLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, + 'domain': {'required': True}, } _attribute_map = { @@ -2960,28 +3024,453 @@ class AzureDataExplorerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.database = kwargs['database'] - self.tenant = kwargs['tenant'] + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~data_factory_management_client.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. 
Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs['access_token'] + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.instance_pool_id = kwargs.get('instance_pool_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~data_factory_management_client.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + } + + def __init__( + self, + **kwargs + ): + super(ExecutionActivity, self).__init__(**kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) + + +class 
AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~data_factory_management_client.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = kwargs['command'] + self.command_timeout = kwargs.get('command_timeout', None) + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). 
URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~data_factory_management_client.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = kwargs['endpoint'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.database = kwargs['database'] + self.tenant = kwargs['tenant'] + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. 
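For orientation, a rough construction sketch for the generated models in the hunks above. These are kwargs-based msrest models, so required parameters mirror the _validation dictionaries and everything else is an optional keyword. The import namespace is taken from the ":type" references in the docstrings, and SecureString is the generated SecretBase subclass, which is not shown in this hunk; treat both as assumptions rather than part of this diff.

    from data_factory_management_client.models import (
        AzureDatabricksLinkedService,
        AzureDataExplorerCommandActivity,
        LinkedServiceReference,
        SecureString,  # assumed: generated SecretBase subclass, not in this hunk
    )

    # Required kwargs per _validation: domain and access_token.
    databricks_ls = AzureDatabricksLinkedService(
        domain="https://adb-1234567890123456.7.azuredatabricks.net",
        access_token=SecureString(value="<databricks-pat>"),
        existing_cluster_id="0123-456789-abc123",
    )

    # ExecutionActivity subclasses take the base Activity kwargs ("name",
    # "depends_on", ...) plus their own typeProperties fields.
    adx_command = AzureDataExplorerCommandActivity(
        name="PurgeOldRows",
        command=".drop extents <| .show extents | where CreatedOn < ago(365d)",
        command_timeout="00:20:00",
        linked_service_name=LinkedServiceReference(reference_name="AzureDataExplorer1"),
    )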
@@ -3038,7 +3527,7 @@ def __init__( **kwargs ): super(AzureDataExplorerSink, self).__init__(**kwargs) - self.type = 'AzureDataExplorerSink' + self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) self.flush_immediately = kwargs.get('flush_immediately', None) @@ -3099,7 +3588,7 @@ def __init__( **kwargs ): super(AzureDataExplorerSource, self).__init__(**kwargs) - self.type = 'AzureDataExplorerSource' + self.type = 'AzureDataExplorerSource' # type: str self.query = kwargs['query'] self.no_truncation = kwargs.get('no_truncation', None) self.query_timeout = kwargs.get('query_timeout', None) @@ -3161,7 +3650,7 @@ def __init__( **kwargs ): super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.type = 'AzureDataExplorerTable' + self.type = 'AzureDataExplorerTable' # type: str self.table = kwargs.get('table', None) @@ -3238,7 +3727,7 @@ def __init__( **kwargs ): super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeAnalytics' + self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = kwargs['account_name'] self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) @@ -3315,7 +3804,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreDataset, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreFile' + self.type = 'AzureDataLakeStoreFile' # type: str self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) self.format = kwargs.get('format', None) @@ -3352,6 +3841,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). 
:type account_name: object @@ -3383,6 +3876,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, @@ -3394,11 +3888,12 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeStore' + self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = kwargs['data_lake_store_uri'] self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.account_name = kwargs.get('account_name', None) self.subscription_id = kwargs.get('subscription_id', None) self.resource_group_name = kwargs.get('resource_group_name', None) @@ -3439,7 +3934,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLocation, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreLocation' + self.type = 'AzureDataLakeStoreLocation' # type: str class AzureDataLakeStoreReadSettings(StoreReadSettings): @@ -3518,7 +4013,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreReadSettings' + self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -3584,7 +4079,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreSink, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreSink' + self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) @@ -3631,7 +4126,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreSource, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreSource' + self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = kwargs.get('recursive', None) @@ -3673,7 +4168,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' + self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = kwargs.get('expiry_date_time', None) @@ -3715,6 +4210,9 @@ class AzureFileStorageLinkedService(LinkedService): :param file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). :type file_share: object + :param snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :type snapshot: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
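Both additions above, typeProperties.azureCloudType on AzureDataLakeStoreLinkedService and typeProperties.snapshot on AzureFileStorageLinkedService, surface as optional keyword arguments on the kwargs-based constructors. A short sketch (import path assumed from the docstring references):

    from data_factory_management_client.models import (
        AzureDataLakeStoreLinkedService,
        AzureFileStorageLinkedService,
    )

    adls = AzureDataLakeStoreLinkedService(
        data_lake_store_uri="https://exampleadls.azuredatalakestore.net/webhdfs/v1",
        service_principal_id="<app-id>",
        tenant="<tenant-id>",
        # New: AzurePublic | AzureChina | AzureUsGovernment | AzureGermany
        azure_cloud_type="AzurePublic",
    )

    files = AzureFileStorageLinkedService(
        host="\\\\examplestorage.file.core.windows.net\\share",
        file_share="share",
        snapshot="2020-06-01T05:30:00.0000000Z",  # new: share snapshot version
    )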
@@ -3740,6 +4238,7 @@ class AzureFileStorageLinkedService(LinkedService): 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, + 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -3748,7 +4247,7 @@ def __init__( **kwargs ): super(AzureFileStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureFileStorage' + self.type = 'AzureFileStorage' # type: str self.host = kwargs.get('host', None) self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) @@ -3757,6 +4256,7 @@ def __init__( self.sas_uri = kwargs.get('sas_uri', None) self.sas_token = kwargs.get('sas_token', None) self.file_share = kwargs.get('file_share', None) + self.snapshot = kwargs.get('snapshot', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -3794,7 +4294,7 @@ def __init__( **kwargs ): super(AzureFileStorageLocation, self).__init__(**kwargs) - self.type = 'AzureFileStorageLocation' + self.type = 'AzureFileStorageLocation' # type: str class AzureFileStorageReadSettings(StoreReadSettings): @@ -3867,7 +4367,7 @@ def __init__( **kwargs ): super(AzureFileStorageReadSettings, self).__init__(**kwargs) - self.type = 'AzureFileStorageReadSettings' + self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -3880,6 +4380,42 @@ def __init__( self.modified_datetime_end = kwargs.get('modified_datetime_end', None) +class AzureFileStorageWriteSettings(StoreWriteSettings): + """Azure File Storage write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFileStorageWriteSettings, self).__init__(**kwargs) + self.type = 'AzureFileStorageWriteSettings' # type: str + + class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. 
@@ -3944,7 +4480,7 @@ def __init__( **kwargs ): super(AzureFunctionActivity, self).__init__(**kwargs) - self.type = 'AzureFunctionActivity' + self.type = 'AzureFunctionActivity' # type: str self.method = kwargs['method'] self.function_name = kwargs['function_name'] self.headers = kwargs.get('headers', None) @@ -4002,7 +4538,7 @@ def __init__( **kwargs ): super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.type = 'AzureFunction' + self.type = 'AzureFunction' # type: str self.function_app_url = kwargs['function_app_url'] self.function_key = kwargs.get('function_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -4051,7 +4587,7 @@ def __init__( **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.type = 'AzureKeyVault' + self.type = 'AzureKeyVault' # type: str self.base_url = kwargs['base_url'] @@ -4084,7 +4620,7 @@ def __init__( **kwargs ): super(SecretBase, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] class AzureKeyVaultSecretReference(SecretBase): @@ -4122,7 +4658,7 @@ def __init__( **kwargs ): super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' + self.type = 'AzureKeyVaultSecret' # type: str self.store = kwargs['store'] self.secret_name = kwargs['secret_name'] self.secret_version = kwargs.get('secret_version', None) @@ -4178,7 +4714,7 @@ def __init__( **kwargs ): super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.type = 'AzureMariaDB' + self.type = 'AzureMariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -4234,7 +4770,7 @@ def __init__( **kwargs ): super(AzureMariaDBSource, self).__init__(**kwargs) - self.type = 'AzureMariaDBSource' + self.type = 'AzureMariaDBSource' # type: str self.query = kwargs.get('query', None) @@ -4292,7 +4828,7 @@ def __init__( **kwargs ): super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.type = 'AzureMariaDBTable' + self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -4361,7 +4897,7 @@ def __init__( **kwargs ): super(AzureMlBatchExecutionActivity, self).__init__(**kwargs) - self.type = 'AzureMLBatchExecution' + self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = kwargs.get('global_parameters', None) self.web_service_outputs = kwargs.get('web_service_outputs', None) self.web_service_inputs = kwargs.get('web_service_inputs', None) @@ -4439,7 +4975,7 @@ def __init__( **kwargs ): super(AzureMlExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'AzureMLExecutePipeline' + self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = kwargs['ml_pipeline_id'] self.experiment_name = kwargs.get('experiment_name', None) self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) @@ -4516,7 +5052,7 @@ def __init__( **kwargs ): super(AzureMlLinkedService, self).__init__(**kwargs) - self.type = 'AzureML' + self.type = 'AzureML' # type: str self.ml_endpoint = kwargs['ml_endpoint'] self.api_key = kwargs['api_key'] self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) @@ -4597,7 +5133,7 @@ def __init__( **kwargs ): super(AzureMlServiceLinkedService, self).__init__(**kwargs) - self.type = 'AzureMLService' + self.type = 'AzureMLService' # type: str self.subscription_id = kwargs['subscription_id'] self.resource_group_name = 
kwargs['resource_group_name'] self.ml_workspace_name = kwargs['ml_workspace_name'] @@ -4669,7 +5205,7 @@ def __init__( **kwargs ): super(AzureMlUpdateResourceActivity, self).__init__(**kwargs) - self.type = 'AzureMLUpdateResource' + self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = kwargs['trained_model_name'] self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] self.trained_model_file_path = kwargs['trained_model_file_path'] @@ -4707,7 +5243,7 @@ def __init__( self.linked_service_name = kwargs['linked_service_name'] -class AzureMySqlLinkedService(LinkedService): +class AzureMySQLLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. @@ -4757,14 +5293,14 @@ def __init__( self, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.type = 'AzureMySql' + super(AzureMySQLLinkedService, self).__init__(**kwargs) + self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMySqlSink(CopySink): +class AzureMySQLSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -4813,12 +5349,12 @@ def __init__( self, **kwargs ): - super(AzureMySqlSink, self).__init__(**kwargs) - self.type = 'AzureMySqlSink' + super(AzureMySQLSink, self).__init__(**kwargs) + self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzureMySqlSource(TabularSource): +class AzureMySQLSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4866,12 +5402,12 @@ def __init__( self, **kwargs ): - super(AzureMySqlSource, self).__init__(**kwargs) - self.type = 'AzureMySqlSource' + super(AzureMySQLSource, self).__init__(**kwargs) + self.type = 'AzureMySqlSource' # type: str self.query = kwargs.get('query', None) -class AzureMySqlTableDataset(Dataset): +class AzureMySQLTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -4929,13 +5465,13 @@ def __init__( self, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureMySqlTable' + super(AzureMySQLTableDataset, self).__init__(**kwargs) + self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) -class AzurePostgreSqlLinkedService(LinkedService): +class AzurePostgreSQLLinkedService(LinkedService): """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -4984,14 +5520,14 @@ def __init__( self, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'AzurePostgreSql' + super(AzurePostgreSQLLinkedService, self).__init__(**kwargs) + self.type = 'AzurePostgreSql' # type: str self.connection_string = kwargs.get('connection_string', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzurePostgreSqlSink(CopySink): +class AzurePostgreSQLSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. 
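The Sql-to-SQL renames above (AzureMySQLLinkedService, AzureMySQLSink, AzureMySQLSource, AzureMySQLTableDataset, AzurePostgreSQLLinkedService, and so on) change only the Python class names; the discriminator each constructor writes into self.type is unchanged, so serialized factory JSON is unaffected. A sketch of what callers see after the rename (import path assumed from the docstrings, LinkedServiceReference as in the generated SDK):

    from data_factory_management_client.models import (
        AzureMySQLLinkedService,
        AzureMySQLTableDataset,
        LinkedServiceReference,
    )

    mysql_ls = AzureMySQLLinkedService(
        connection_string="Server=example.mysql.database.azure.com;Port=3306;Database=db;UID=user",
    )
    mysql_ds = AzureMySQLTableDataset(
        linked_service_name=LinkedServiceReference(reference_name="AzureMySql1"),
        table_name="orders",
    )

    # Wire-level discriminators are unchanged by the rename.
    assert mysql_ls.type == "AzureMySql"
    assert mysql_ds.type == "AzureMySqlTable"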
@@ -5040,12 +5576,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlSink' + super(AzurePostgreSQLSink, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzurePostgreSqlSource(TabularSource): +class AzurePostgreSQLSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. @@ -5094,12 +5630,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlSource' + super(AzurePostgreSQLSource, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class AzurePostgreSqlTableDataset(Dataset): +class AzurePostgreSQLTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -5161,8 +5697,8 @@ def __init__( self, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlTable' + super(AzurePostgreSQLTableDataset, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -5214,7 +5750,7 @@ def __init__( **kwargs ): super(AzureQueueSink, self).__init__(**kwargs) - self.type = 'AzureQueueSink' + self.type = 'AzureQueueSink' # type: str class AzureSearchIndexDataset(Dataset): @@ -5273,7 +5809,7 @@ def __init__( **kwargs ): super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.type = 'AzureSearchIndex' + self.type = 'AzureSearchIndex' # type: str self.index_name = kwargs['index_name'] @@ -5328,7 +5864,7 @@ def __init__( **kwargs ): super(AzureSearchIndexSink, self).__init__(**kwargs) - self.type = 'AzureSearchIndexSink' + self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -5383,13 +5919,13 @@ def __init__( **kwargs ): super(AzureSearchLinkedService, self).__init__(**kwargs) - self.type = 'AzureSearch' + self.type = 'AzureSearch' # type: str self.url = kwargs['url'] self.key = kwargs.get('key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlDatabaseLinkedService(LinkedService): +class AzureSQLDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -5421,6 +5957,10 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -5444,6 +5984,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5451,17 +5992,18 @@ def __init__( self, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDatabase' + super(AzureSQLDatabaseLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDatabase' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlDWLinkedService(LinkedService): +class AzureSQLDWLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. @@ -5493,6 +6035,10 @@ class AzureSqlDWLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5516,6 +6062,7 @@ class AzureSqlDWLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5523,17 +6070,18 @@ def __init__( self, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDW' + super(AzureSQLDWLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDW' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlDWTableDataset(Dataset): +class AzureSQLDWTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. 
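The new azureCloudType property on the Azure SQL linked services above behaves like the Data Lake Store one earlier: an optional expression that pins the cloud used for service-principal auth instead of defaulting to the factory region's cloud. A sketch for the SQL Database variant (SecureString is the generated secret model, assumed here):

    from data_factory_management_client.models import (
        AzureSQLDatabaseLinkedService,
        SecureString,  # assumed: generated SecretBase subclass
    )

    sql_ls = AzureSQLDatabaseLinkedService(
        connection_string="Data Source=example.database.windows.net;Initial Catalog=exampledb;",
        service_principal_id="<app-id>",
        service_principal_key=SecureString(value="<client-secret>"),
        tenant="<tenant-id>",
        # New in this version: overrides the default (factory region's cloud).
        azure_cloud_type="AzureUsGovernment",
    )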
@@ -5595,14 +6143,14 @@ def __init__( self, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlDWTable' + super(AzureSQLDWTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlDWTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSqlMiLinkedService(LinkedService): +class AzureSQLMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -5634,6 +6182,10 @@ class AzureSqlMiLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5657,6 +6209,7 @@ class AzureSqlMiLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5664,17 +6217,18 @@ def __init__( self, **kwargs ): - super(AzureSqlMiLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlMI' + super(AzureSQLMiLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlMI' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlMiTableDataset(Dataset): +class AzureSQLMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. @@ -5736,14 +6290,14 @@ def __init__( self, **kwargs ): - super(AzureSqlMiTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlMITable' + super(AzureSQLMiTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlMITable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSqlSink(CopySink): +class AzureSQLSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. 
@@ -5812,8 +6366,8 @@ def __init__( self, **kwargs ): - super(AzureSqlSink, self).__init__(**kwargs) - self.type = 'AzureSqlSink' + super(AzureSQLSink, self).__init__(**kwargs) + self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -5822,7 +6376,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class AzureSqlSource(TabularSource): +class AzureSQLSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -5861,9 +6415,9 @@ class AzureSqlSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -5883,15 +6437,15 @@ class AzureSqlSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( self, **kwargs ): - super(AzureSqlSource, self).__init__(**kwargs) - self.type = 'AzureSqlSource' + super(AzureSQLSource, self).__init__(**kwargs) + self.type = 'AzureSqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -5900,7 +6454,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class AzureSqlTableDataset(Dataset): +class AzureSQLTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. 
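Alongside the AzureSQLSink/AzureSQLSource/AzureSQLTableDataset renames, the source's partition helpers are now referenced as SQLPartitionOption and SQLPartitionSettings; the serialized partitionOption value is still a plain string. For example:

    from data_factory_management_client.models import AzureSQLSource

    source = AzureSQLSource(
        sql_reader_query="SELECT * FROM dbo.Orders",
        # Possible values: None, PhysicalPartitionsOfTable, DynamicRange.
        partition_option="PhysicalPartitionsOfTable",
    )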
@@ -5962,8 +6516,8 @@ def __init__( self, **kwargs ): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlTable' + super(AzureSQLTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -6026,7 +6580,7 @@ def __init__( **kwargs ): super(AzureStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureStorage' + self.type = 'AzureStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = kwargs.get('sas_uri', None) @@ -6090,7 +6644,7 @@ def __init__( **kwargs ): super(AzureTableDataset, self).__init__(**kwargs) - self.type = 'AzureTable' + self.type = 'AzureTable' # type: str self.table_name = kwargs['table_name'] @@ -6156,7 +6710,7 @@ def __init__( **kwargs ): super(AzureTableSink, self).__init__(**kwargs) - self.type = 'AzureTableSink' + self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) @@ -6217,7 +6771,7 @@ def __init__( **kwargs ): super(AzureTableSource, self).__init__(**kwargs) - self.type = 'AzureTableSource' + self.type = 'AzureTableSource' # type: str self.azure_table_source_query = kwargs.get('azure_table_source_query', None) self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) @@ -6279,7 +6833,7 @@ def __init__( **kwargs ): super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureTableStorage' + self.type = 'AzureTableStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = kwargs.get('sas_uri', None) @@ -6344,7 +6898,7 @@ def __init__( **kwargs ): super(BinaryDataset, self).__init__(**kwargs) - self.type = 'Binary' + self.type = 'Binary' # type: str self.location = kwargs.get('location', None) self.compression = kwargs.get('compression', None) @@ -6383,7 +6937,7 @@ def __init__( ): super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'FormatReadSettings' + self.type = 'FormatReadSettings' # type: str class BinaryReadSettings(FormatReadSettings): @@ -6415,7 +6969,7 @@ def __init__( **kwargs ): super(BinaryReadSettings, self).__init__(**kwargs) - self.type = 'BinaryReadSettings' + self.type = 'BinaryReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) @@ -6468,7 +7022,7 @@ def __init__( **kwargs ): super(BinarySink, self).__init__(**kwargs) - self.type = 'BinarySink' + self.type = 'BinarySink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -6516,7 +7070,7 @@ def __init__( **kwargs ): super(BinarySource, self).__init__(**kwargs) - self.type = 'BinarySource' + self.type = 'BinarySource' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) @@ -6568,7 +7122,7 @@ def __init__( ): super(Trigger, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 
'Trigger' + self.type = 'Trigger' # type: str self.description = kwargs.get('description', None) self.runtime_state = None self.annotations = kwargs.get('annotations', None) @@ -6623,7 +7177,7 @@ def __init__( **kwargs ): super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.type = 'MultiplePipelineTrigger' + self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = kwargs.get('pipelines', None) @@ -6691,7 +7245,7 @@ def __init__( **kwargs ): super(BlobEventsTrigger, self).__init__(**kwargs) - self.type = 'BlobEventsTrigger' + self.type = 'BlobEventsTrigger' # type: str self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) self.ignore_empty_blobs = kwargs.get('ignore_empty_blobs', None) @@ -6760,7 +7314,7 @@ def __init__( **kwargs ): super(BlobSink, self).__init__(**kwargs) - self.type = 'BlobSink' + self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) @@ -6817,7 +7371,7 @@ def __init__( **kwargs ): super(BlobSource, self).__init__(**kwargs) - self.type = 'BlobSource' + self.type = 'BlobSource' # type: str self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) self.skip_header_line_count = kwargs.get('skip_header_line_count', None) self.recursive = kwargs.get('recursive', None) @@ -6878,7 +7432,7 @@ def __init__( **kwargs ): super(BlobTrigger, self).__init__(**kwargs) - self.type = 'BlobTrigger' + self.type = 'BlobTrigger' # type: str self.folder_path = kwargs['folder_path'] self.max_concurrency = kwargs['max_concurrency'] self.linked_service = kwargs['linked_service'] @@ -6947,7 +7501,7 @@ def __init__( **kwargs ): super(CassandraLinkedService, self).__init__(**kwargs) - self.type = 'Cassandra' + self.type = 'Cassandra' # type: str self.host = kwargs['host'] self.authentication_type = kwargs.get('authentication_type', None) self.port = kwargs.get('port', None) @@ -7015,7 +7569,7 @@ def __init__( **kwargs ): super(CassandraSource, self).__init__(**kwargs) - self.type = 'CassandraSource' + self.type = 'CassandraSource' # type: str self.query = kwargs.get('query', None) self.consistency_level = kwargs.get('consistency_level', None) @@ -7079,7 +7633,7 @@ def __init__( **kwargs ): super(CassandraTableDataset, self).__init__(**kwargs) - self.type = 'CassandraTable' + self.type = 'CassandraTable' # type: str self.table_name = kwargs.get('table_name', None) self.keyspace = kwargs.get('keyspace', None) @@ -7137,7 +7691,7 @@ def __init__( **kwargs ): super(ChainingTrigger, self).__init__(**kwargs) - self.type = 'ChainingTrigger' + self.type = 'ChainingTrigger' # type: str self.pipeline = kwargs['pipeline'] self.depends_on = kwargs['depends_on'] self.run_dimension = kwargs['run_dimension'] @@ -7181,38 +7735,6 @@ def __init__( self.details = kwargs.get('details', None) -class CustomSetupBase(msrest.serialization.Model): - """The base definition of the custom setup. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} - } - - def __init__( - self, - **kwargs - ): - super(CustomSetupBase, self).__init__(**kwargs) - self.type = None - - class CmdkeySetup(CustomSetupBase): """The custom setup of running cmdkey commands. @@ -7247,7 +7769,7 @@ def __init__( **kwargs ): super(CmdkeySetup, self).__init__(**kwargs) - self.type = 'CmdkeySetup' + self.type = 'CmdkeySetup' # type: str self.target_name = kwargs['target_name'] self.user_name = kwargs['user_name'] self.password = kwargs['password'] @@ -7308,7 +7830,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsEntity' + self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -7417,7 +7939,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.type = 'CommonDataServiceForApps' + self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -7435,8 +7957,6 @@ def __init__( class CommonDataServiceForAppsSink(CopySink): """A copy activity Common Data Service for Apps sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -7459,8 +7979,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert". - :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
@@ -7472,7 +7993,7 @@ class CommonDataServiceForAppsSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -7488,14 +8009,13 @@ class CommonDataServiceForAppsSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, **kwargs ): super(CommonDataServiceForAppsSink, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsSink' + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = kwargs['write_behavior'] self.ignore_null_values = kwargs.get('ignore_null_values', None) self.alternate_key_name = kwargs.get('alternate_key_name', None) @@ -7546,7 +8066,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsSource, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsSource' + self.type = 'CommonDataServiceForAppsSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -7580,7 +8100,7 @@ def __init__( **kwargs ): super(ComponentSetup, self).__init__(**kwargs) - self.type = 'ComponentSetup' + self.type = 'ComponentSetup' # type: str self.component_name = kwargs['component_name'] self.license_key = kwargs.get('license_key', None) @@ -7589,7 +8109,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -7610,7 +8130,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -7619,7 +8139,7 @@ def __init__( ): super(CompressionReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CompressionReadSettings' + self.type = 'CompressionReadSettings' # type: str class ConcurLinkedService(LinkedService): @@ -7640,6 +8160,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. 
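Note the behavioral change in CommonDataServiceForAppsSink above: write_behavior was previously a class-level constant ("Upsert") and is now a required parameter typed against DynamicsSinkWriteBehavior, so callers must pass it explicitly. Roughly:

    from data_factory_management_client.models import CommonDataServiceForAppsSink

    sink = CommonDataServiceForAppsSink(
        # Previously implicit; now required. "Upsert" is the documented value.
        write_behavior="Upsert",
        ignore_null_values=True,
        alternate_key_name="contact_alternate_key",
    )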
@@ -7676,6 +8199,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -7690,7 +8214,8 @@ def __init__( **kwargs ): super(ConcurLinkedService, self).__init__(**kwargs) - self.type = 'Concur' + self.type = 'Concur' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -7754,7 +8279,7 @@ def __init__( **kwargs ): super(ConcurObjectDataset, self).__init__(**kwargs) - self.type = 'ConcurObject' + self.type = 'ConcurObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -7808,10 +8333,45 @@ def __init__( **kwargs ): super(ConcurSource, self).__init__(**kwargs) - self.type = 'ConcurSource' + self.type = 'ConcurSource' # type: str self.query = kwargs.get('query', None) +class ConnectionStateProperties(msrest.serialization.Model): + """The connection state of a managed private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar actions_required: The actions required on the managed private endpoint. + :vartype actions_required: str + :ivar description: The managed private endpoint description. + :vartype description: str + :ivar status: The approval status. + :vartype status: str + """ + + _validation = { + 'actions_required': {'readonly': True}, + 'description': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ConnectionStateProperties, self).__init__(**kwargs) + self.actions_required = None + self.description = None + self.status = None + + class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. @@ -7851,7 +8411,7 @@ def __init__( **kwargs ): super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' + self.type = 'Container' # type: str class CopyActivity(ExecutionActivity): @@ -7905,9 +8465,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
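The new ConnectionStateProperties model a few hunks above is response-only: all three fields are marked readonly, so locally constructed instances start out empty and values appear only on objects the service returns (for example when reading a managed private endpoint back). A minimal sketch of that behavior; the CopyActivity logging hunks continue below.

    from data_factory_management_client.models import ConnectionStateProperties

    state = ConnectionStateProperties()
    assert state.status is None            # populated by the service on GET
    assert state.actions_required is None
    assert state.description is None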
@@ -7947,6 +8509,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -7958,7 +8521,7 @@ def __init__( **kwargs ): super(CopyActivity, self).__init__(**kwargs) - self.type = 'Copy' + self.type = 'Copy' # type: str self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.source = kwargs['source'] @@ -7971,12 +8534,38 @@ def __init__( self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) self.validate_data_consistency = kwargs.get('validate_data_consistency', None) self.skip_error_file = kwargs.get('skip_error_file', None) +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. 
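The new log_settings property on CopyActivity replaces the now-deprecated log_storage_settings. A sketch of how the pieces fit together, using the CopyActivityLogSettings class added above and the LogSettings / LogLocationSettings classes added later in this file; the linked service name is a placeholder and the import path is assumed from the docstrings:

from data_factory_management_client.models import (
    CopyActivityLogSettings,
    LinkedServiceReference,
    LogLocationSettings,
    LogSettings,
)

log_settings = LogSettings(
    enable_copy_activity_log=True,
    copy_activity_log_settings=CopyActivityLogSettings(
        log_level="Warning",         # docstring: supported values are Info, Warning
        enable_reliable_logging=True,
    ),
    log_location_settings=LogLocationSettings(  # required by LogSettings
        linked_service_name=LinkedServiceReference(reference_name="exampleBlobStorage"),
        path="copyactivity-logs",
    ),
)
# The object is then handed to the activity via the new kwarg:
#   CopyActivity(name=..., source=..., sink=..., log_settings=log_settings)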
@@ -8011,7 +8600,7 @@ def __init__( ): super(CopyTranslator, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopyTranslator' + self.type = 'CopyTranslator' # type: str class CosmosDBLinkedService(LinkedService): @@ -8072,7 +8661,7 @@ def __init__( **kwargs ): super(CosmosDBLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDb' + self.type = 'CosmosDb' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_endpoint = kwargs.get('account_endpoint', None) self.database = kwargs.get('database', None) @@ -8136,7 +8725,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiCollection' + self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = kwargs['collection'] @@ -8189,7 +8778,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApi' + self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -8245,7 +8834,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiSink, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiSink' + self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -8309,7 +8898,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiSource, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiSource' + self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) @@ -8317,7 +8906,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class CosmosDBSqlApiCollectionDataset(Dataset): +class CosmosDBSQLApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. @@ -8372,12 +8961,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSqlApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiCollection' + super(CosmosDBSQLApiCollectionDataset, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = kwargs['collection_name'] -class CosmosDBSqlApiSink(CopySink): +class CosmosDBSQLApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -8426,12 +9015,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSqlApiSink, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiSink' + super(CosmosDBSQLApiSink, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) -class CosmosDBSqlApiSource(CopySource): +class CosmosDBSQLApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -8458,6 +9047,9 @@ class CosmosDBSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). 
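The CosmosDB SQL API models are only renamed on the Python side (CosmosDBSqlApi* becomes CosmosDBSQLApi*); the discriminator strings sent to the service are unchanged, and CosmosDBSQLApiSource additionally gains the optional detect_datetime flag described above. A sketch, with the import path assumed from the docstrings:

from data_factory_management_client.models import CosmosDBSQLApiSource

source = CosmosDBSQLApiSource(
    query="select * from c",
    page_size=1000,
    detect_datetime=True,   # new optional flag added in this change
)
assert source.type == 'CosmosDbSqlApiSource'  # wire format is unaffected by the rename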
+ :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -8476,6 +9068,7 @@ class CosmosDBSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -8483,11 +9076,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSqlApiSource, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiSource' + super(CosmosDBSQLApiSource, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) self.preferred_regions = kwargs.get('preferred_regions', None) + self.detect_datetime = kwargs.get('detect_datetime', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -8541,7 +9135,7 @@ def __init__( **kwargs ): super(CouchbaseLinkedService, self).__init__(**kwargs) - self.type = 'Couchbase' + self.type = 'Couchbase' # type: str self.connection_string = kwargs.get('connection_string', None) self.cred_string = kwargs.get('cred_string', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -8597,7 +9191,7 @@ def __init__( **kwargs ): super(CouchbaseSource, self).__init__(**kwargs) - self.type = 'CouchbaseSource' + self.type = 'CouchbaseSource' # type: str self.query = kwargs.get('query', None) @@ -8655,7 +9249,7 @@ def __init__( **kwargs ): super(CouchbaseTableDataset, self).__init__(**kwargs) - self.type = 'CouchbaseTable' + self.type = 'CouchbaseTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -8847,7 +9441,7 @@ def __init__( **kwargs ): super(CustomActivity, self).__init__(**kwargs) - self.type = 'Custom' + self.type = 'Custom' # type: str self.command = kwargs['command'] self.resource_linked_service = kwargs.get('resource_linked_service', None) self.folder_path = kwargs.get('folder_path', None) @@ -8933,7 +9527,7 @@ def __init__( **kwargs ): super(CustomDataset, self).__init__(**kwargs) - self.type = 'CustomDataset' + self.type = 'CustomDataset' # type: str self.type_properties = kwargs.get('type_properties', None) @@ -8979,7 +9573,7 @@ def __init__( **kwargs ): super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type = 'CustomDataSource' + self.type = 'CustomDataSource' # type: str self.type_properties = kwargs['type_properties'] @@ -9041,7 +9635,7 @@ def __init__( **kwargs ): super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.type = 'DatabricksNotebook' + self.type = 'DatabricksNotebook' # type: str self.notebook_path = kwargs['notebook_path'] self.base_parameters = kwargs.get('base_parameters', None) self.libraries = kwargs.get('libraries', None) @@ -9104,7 +9698,7 @@ def __init__( **kwargs ): super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkJar' + self.type = 'DatabricksSparkJar' # type: str self.main_class_name = kwargs['main_class_name'] self.parameters = kwargs.get('parameters', None) self.libraries = kwargs.get('libraries', None) @@ -9166,7 +9760,7 @@ def __init__( **kwargs ): super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.type = 
'DatabricksSparkPython' + self.type = 'DatabricksSparkPython' # type: str self.python_file = kwargs['python_file'] self.parameters = kwargs.get('parameters', None) self.libraries = kwargs.get('libraries', None) @@ -9205,7 +9799,7 @@ def __init__( **kwargs ): super(DataFlow, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] self.description = kwargs.get('description', None) self.annotations = kwargs.get('annotations', None) self.folder = kwargs.get('folder', None) @@ -9319,8 +9913,9 @@ class DataFlowDebugPackage(msrest.serialization.Model): :type parameters_debug_settings_parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. :type dataset_parameters: object - :param folder_path: Folder path for staging blob. - :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :ivar type: Linked service reference type. Default value: "LinkedServiceReference". :vartype type: str :param reference_name: Reference LinkedService name. @@ -9345,7 +9940,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'str'}, + 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, @@ -9790,8 +10385,9 @@ class DataFlowStagingInfo(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param folder_path: Folder path for staging blob. - :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :ivar type: Linked service reference type. Default value: "LinkedServiceReference". :vartype type: str :param reference_name: Reference LinkedService name. @@ -9805,7 +10401,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): } _attribute_map = { - 'folder_path': {'key': 'folderPath', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'type': {'key': 'linkedService.type', 'type': 'str'}, 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, @@ -9897,7 +10493,7 @@ def __init__( **kwargs ): super(DataLakeAnalyticsUsqlActivity, self).__init__(**kwargs) - self.type = 'DataLakeAnalyticsU-SQL' + self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = kwargs['script_path'] self.script_linked_service = kwargs['script_linked_service'] self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) @@ -9911,7 +10507,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. 
+ sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -9932,7 +10528,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -9941,7 +10537,7 @@ def __init__( ): super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetCompression' + self.type = 'DatasetCompression' # type: str class DatasetBZip2Compression(DatasetCompression): @@ -9970,7 +10566,7 @@ def __init__( **kwargs ): super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' + self.type = 'BZip2' # type: str class DatasetDataElement(msrest.serialization.Model): @@ -10053,7 +10649,7 @@ def __init__( **kwargs ): super(DatasetDeflateCompression, self).__init__(**kwargs) - self.type = 'Deflate' + self.type = 'Deflate' # type: str self.level = kwargs.get('level', None) @@ -10105,7 +10701,7 @@ def __init__( **kwargs ): super(DatasetGZipCompression, self).__init__(**kwargs) - self.type = 'GZip' + self.type = 'GZip' # type: str self.level = kwargs.get('level', None) @@ -10246,6 +10842,68 @@ def __init__( self.type = kwargs.get('type', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". 
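DatasetTarCompression and DatasetTarGZipCompression plug into the existing DatasetCompression polymorphism through the 'Tar' and 'TarGZip' discriminators registered above. A sketch of constructing them; they would then be assigned to a dataset's compression property (for example on JsonDataset), and the import path is assumed from the docstrings:

from data_factory_management_client.models import (
    DatasetTarCompression,
    DatasetTarGZipCompression,
)

plain_tar = DatasetTarCompression()                    # serializes with type 'Tar'
tar_gzip = DatasetTarGZipCompression(level="Optimal")  # level: "Optimal" or "Fastest"
# e.g. JsonDataset(..., compression=tar_gzip) on any dataset that accepts a
# DatasetCompression object.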
+ :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.type = 'TarGZip' # type: str + self.level = kwargs.get('level', None) + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -10275,15 +10933,13 @@ def __init__( **kwargs ): super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.type = 'ZipDeflate' + self.type = 'ZipDeflate' # type: str self.level = kwargs.get('level', None) class Db2LinkedService(LinkedService): """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -10309,9 +10965,9 @@ class Db2LinkedService(LinkedService): :param database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. It is mutually - exclusive with connectionString property. Default value: "Basic". - :vartype authentication_type: str + :param authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". + :type authentication_type: str or ~data_factory_management_client.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
:type username: object @@ -10332,7 +10988,6 @@ class Db2LinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'authentication_type': {'constant': True}, } _attribute_map = { @@ -10353,17 +11008,16 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, **kwargs ): super(Db2LinkedService, self).__init__(**kwargs) - self.type = 'Db2' + self.type = 'Db2' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.database = kwargs.get('database', None) + self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.package_collection = kwargs.get('package_collection', None) @@ -10420,7 +11074,7 @@ def __init__( **kwargs ): super(Db2Source, self).__init__(**kwargs) - self.type = 'Db2Source' + self.type = 'Db2Source' # type: str self.query = kwargs.get('query', None) @@ -10486,7 +11140,7 @@ def __init__( **kwargs ): super(Db2TableDataset, self).__init__(**kwargs) - self.type = 'Db2Table' + self.type = 'Db2Table' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -10561,7 +11215,7 @@ def __init__( **kwargs ): super(DeleteActivity, self).__init__(**kwargs) - self.type = 'Delete' + self.type = 'Delete' # type: str self.recursive = kwargs.get('recursive', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.enable_logging = kwargs.get('enable_logging', None) @@ -10630,7 +11284,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". 
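Db2LinkedService's authentication_type stops being a constant pinned to "Basic" and becomes a settable parameter typed as Db2AuthenticationType. A sketch of the resulting constructor call, with placeholder connection values and the import path assumed from the docstrings:

from data_factory_management_client.models import Db2LinkedService

db2 = Db2LinkedService(
    server="db2.example.com",      # placeholder host
    database="SAMPLE",             # placeholder database
    authentication_type="Basic",   # previously a fixed class attribute
    username="db2user",
)
# connection_string remains mutually exclusive with server/database/
# authentication_type, as the docstring notes.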
@@ -10679,7 +11333,7 @@ def __init__( **kwargs ): super(DelimitedTextDataset, self).__init__(**kwargs) - self.type = 'DelimitedText' + self.type = 'DelimitedText' # type: str self.location = kwargs.get('location', None) self.column_delimiter = kwargs.get('column_delimiter', None) self.row_delimiter = kwargs.get('row_delimiter', None) @@ -10725,7 +11379,7 @@ def __init__( **kwargs ): super(DelimitedTextReadSettings, self).__init__(**kwargs) - self.type = 'DelimitedTextReadSettings' + self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = kwargs.get('skip_line_count', None) self.compression_properties = kwargs.get('compression_properties', None) @@ -10782,7 +11436,7 @@ def __init__( **kwargs ): super(DelimitedTextSink, self).__init__(**kwargs) - self.type = 'DelimitedTextSink' + self.type = 'DelimitedTextSink' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) @@ -10835,7 +11489,7 @@ def __init__( **kwargs ): super(DelimitedTextSource, self).__init__(**kwargs) - self.type = 'DelimitedTextSource' + self.type = 'DelimitedTextSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -10857,6 +11511,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
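DelimitedTextWriteSettings gains max_rows_per_file and file_name_prefix above. A sketch of a write-settings object that splits output into capped files; the values are placeholders and the import path is assumed from the docstrings:

from data_factory_management_client.models import DelimitedTextWriteSettings

write_settings = DelimitedTextWriteSettings(
    file_extension=".csv",       # still required
    max_rows_per_file=100000,    # cap the row count of each written file
    file_name_prefix="part",     # applies when copying from a non-file store
                                 # without partitionOptions, per the docstring
)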
+ :type file_name_prefix: object """ _validation = { @@ -10869,6 +11530,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -10876,9 +11539,11 @@ def __init__( **kwargs ): super(DelimitedTextWriteSettings, self).__init__(**kwargs) - self.type = 'DelimitedTextWriteSettings' + self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs['file_extension'] + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class DependencyReference(msrest.serialization.Model): @@ -10910,7 +11575,7 @@ def __init__( **kwargs ): super(DependencyReference, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] class DistcpSettings(msrest.serialization.Model): @@ -11007,7 +11672,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionDataset, self).__init__(**kwargs) - self.type = 'DocumentDbCollection' + self.type = 'DocumentDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -11065,7 +11730,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionSink, self).__init__(**kwargs) - self.type = 'DocumentDbCollectionSink' + self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = kwargs.get('nesting_separator', None) self.write_behavior = kwargs.get('write_behavior', None) @@ -11123,7 +11788,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionSource, self).__init__(**kwargs) - self.type = 'DocumentDbCollectionSource' + self.type = 'DocumentDbCollectionSource' # type: str self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) self.query_timeout = kwargs.get('query_timeout', None) @@ -11180,7 +11845,7 @@ def __init__( **kwargs ): super(DrillLinkedService, self).__init__(**kwargs) - self.type = 'Drill' + self.type = 'Drill' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -11236,7 +11901,7 @@ def __init__( **kwargs ): super(DrillSource, self).__init__(**kwargs) - self.type = 'DrillSource' + self.type = 'DrillSource' # type: str self.query = kwargs.get('query', None) @@ -11302,7 +11967,7 @@ def __init__( **kwargs ): super(DrillTableDataset, self).__init__(**kwargs) - self.type = 'DrillTable' + self.type = 'DrillTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -11430,7 +12095,7 @@ def __init__( **kwargs ): super(DynamicsAxLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsAX' + self.type = 'DynamicsAX' # type: str self.url = kwargs['url'] self.service_principal_id = kwargs['service_principal_id'] self.service_principal_key = kwargs['service_principal_key'] @@ -11495,7 +12160,7 @@ def __init__( **kwargs ): super(DynamicsAxResourceDataset, self).__init__(**kwargs) - self.type = 'DynamicsAXResource' + self.type = 'DynamicsAXResource' # type: str self.path = kwargs['path'] @@ -11555,7 +12220,7 @@ def __init__( **kwargs ): 
super(DynamicsAxSource, self).__init__(**kwargs) - self.type = 'DynamicsAXSource' + self.type = 'DynamicsAXSource' # type: str self.query = kwargs.get('query', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -11615,7 +12280,7 @@ def __init__( **kwargs ): super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsCrmEntity' + self.type = 'DynamicsCrmEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -11722,7 +12387,7 @@ def __init__( **kwargs ): super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsCrm' + self.type = 'DynamicsCrm' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -11740,8 +12405,6 @@ def __init__( class DynamicsCrmSink(CopySink): """A copy activity Dynamics CRM sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -11764,8 +12427,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert". - :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
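DynamicsCrmSink (and DynamicsSink below) follows the same constant-to-kwarg change as CommonDataServiceForAppsSink earlier: write_behavior is now a required argument typed as str or DynamicsSinkWriteBehavior instead of a fixed "Upsert" class attribute. A short sketch (import path assumed from the docstrings):

from data_factory_management_client.models import DynamicsCrmSink

sink = DynamicsCrmSink(
    write_behavior="Upsert",    # must now be supplied by the caller
    ignore_null_values=False,
)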
@@ -11777,7 +12441,7 @@ class DynamicsCrmSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -11793,14 +12457,13 @@ class DynamicsCrmSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, **kwargs ): super(DynamicsCrmSink, self).__init__(**kwargs) - self.type = 'DynamicsCrmSink' + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = kwargs['write_behavior'] self.ignore_null_values = kwargs.get('ignore_null_values', None) self.alternate_key_name = kwargs.get('alternate_key_name', None) @@ -11851,7 +12514,7 @@ def __init__( **kwargs ): super(DynamicsCrmSource, self).__init__(**kwargs) - self.type = 'DynamicsCrmSource' + self.type = 'DynamicsCrmSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -11911,7 +12574,7 @@ def __init__( **kwargs ): super(DynamicsEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsEntity' + self.type = 'DynamicsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -12015,7 +12678,7 @@ def __init__( **kwargs ): super(DynamicsLinkedService, self).__init__(**kwargs) - self.type = 'Dynamics' + self.type = 'Dynamics' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -12033,8 +12696,6 @@ def __init__( class DynamicsSink(CopySink): """A copy activity Dynamics sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -12057,8 +12718,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert". - :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
@@ -12070,7 +12732,7 @@ class DynamicsSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -12086,14 +12748,13 @@ class DynamicsSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, **kwargs ): super(DynamicsSink, self).__init__(**kwargs) - self.type = 'DynamicsSink' + self.type = 'DynamicsSink' # type: str + self.write_behavior = kwargs['write_behavior'] self.ignore_null_values = kwargs.get('ignore_null_values', None) self.alternate_key_name = kwargs.get('alternate_key_name', None) @@ -12144,7 +12805,7 @@ def __init__( **kwargs ): super(DynamicsSource, self).__init__(**kwargs) - self.type = 'DynamicsSource' + self.type = 'DynamicsSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -12217,7 +12878,7 @@ def __init__( **kwargs ): super(EloquaLinkedService, self).__init__(**kwargs) - self.type = 'Eloqua' + self.type = 'Eloqua' # type: str self.endpoint = kwargs['endpoint'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -12281,7 +12942,7 @@ def __init__( **kwargs ): super(EloquaObjectDataset, self).__init__(**kwargs) - self.type = 'EloquaObject' + self.type = 'EloquaObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -12335,7 +12996,7 @@ def __init__( **kwargs ): super(EloquaSource, self).__init__(**kwargs) - self.type = 'EloquaSource' + self.type = 'EloquaSource' # type: str self.query = kwargs.get('query', None) @@ -12393,7 +13054,7 @@ def __init__( **kwargs ): super(EnvironmentVariableSetup, self).__init__(**kwargs) - self.type = 'EnvironmentVariableSetup' + self.type = 'EnvironmentVariableSetup' # type: str self.variable_name = kwargs['variable_name'] self.variable_value = kwargs['variable_value'] @@ -12471,7 +13132,7 @@ def __init__( **kwargs ): super(ExcelDataset, self).__init__(**kwargs) - self.type = 'Excel' + self.type = 'Excel' # type: str self.location = kwargs.get('location', None) self.sheet_name = kwargs.get('sheet_name', None) self.range = kwargs.get('range', None) @@ -12525,7 +13186,7 @@ def __init__( **kwargs ): super(ExcelSource, self).__init__(**kwargs) - self.type = 'ExcelSource' + self.type = 'ExcelSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -12589,7 +13250,7 @@ def __init__( **kwargs ): super(ExecuteDataFlowActivity, self).__init__(**kwargs) - self.type = 'ExecuteDataFlow' + self.type = 'ExecuteDataFlow' # type: str self.data_flow = kwargs['data_flow'] self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) @@ -12671,7 +13332,7 @@ def __init__( **kwargs ): super(ExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'ExecutePipeline' + self.type = 'ExecutePipeline' # type: str self.pipeline = kwargs['pipeline'] self.parameters = kwargs.get('parameters', None) self.wait_on_completion = kwargs.get('wait_on_completion', None) @@ -12768,7 +13429,7 @@ def __init__( **kwargs ): super(ExecuteSsisPackageActivity, self).__init__(**kwargs) - self.type = 'ExecuteSSISPackage' + self.type = 'ExecuteSSISPackage' # type: str self.package_location = kwargs['package_location'] self.runtime = kwargs.get('runtime', None) self.logging_level = kwargs.get('logging_level', None) @@ -12783,41 
+13444,56 @@ def __init__( self.log_location = kwargs.get('log_location', None) -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. +class ExposureControlBatchRequest(msrest.serialization.Model): + """A list of exposure control features. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str + :param exposure_control_requests: Required. List of exposure control features. + :type exposure_control_requests: + list[~data_factory_management_client.models.ExposureControlRequest] """ _validation = { - 'type': {'required': True}, + 'exposure_control_requests': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'exposure_control_requests': {'key': 'exposureControlRequests', 'type': '[ExposureControlRequest]'}, } - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + def __init__( + self, + **kwargs + ): + super(ExposureControlBatchRequest, self).__init__(**kwargs) + self.exposure_control_requests = kwargs['exposure_control_requests'] + + +class ExposureControlBatchResponse(msrest.serialization.Model): + """A list of exposure control feature values. + + All required parameters must be populated in order to send to Azure. + + :param exposure_control_responses: Required. List of exposure control feature values. + :type exposure_control_responses: + list[~data_factory_management_client.models.ExposureControlResponse] + """ + + _validation = { + 'exposure_control_responses': {'required': True}, + } + + _attribute_map = { + 'exposure_control_responses': {'key': 'exposureControlResponses', 'type': '[ExposureControlResponse]'}, } def __init__( self, **kwargs ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ExportSettings' + super(ExposureControlBatchResponse, self).__init__(**kwargs) + self.exposure_control_responses = kwargs['exposure_control_responses'] class ExposureControlRequest(msrest.serialization.Model): @@ -12987,6 +13663,9 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". 
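Alongside the new ExposureControlBatchRequest/Response models, the Factory model gains public_network_access above. A sketch of a factory payload that disables public network access; location comes from the Resource base class and the import path is assumed from the docstrings:

from data_factory_management_client.models import Factory

factory = Factory(
    location="East US",
    public_network_access="Disabled",  # "Enabled" or "Disabled" per the docstring
)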
+ :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -13013,6 +13692,7 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -13027,6 +13707,7 @@ def __init__( self.version = None self.repo_configuration = kwargs.get('repo_configuration', None) self.global_parameters = kwargs.get('global_parameters', None) + self.public_network_access = kwargs.get('public_network_access', None) class FactoryRepoConfiguration(msrest.serialization.Model): @@ -13077,7 +13758,7 @@ def __init__( **kwargs ): super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] self.account_name = kwargs['account_name'] self.repository_name = kwargs['repository_name'] self.collaboration_branch = kwargs['collaboration_branch'] @@ -13129,7 +13810,7 @@ def __init__( **kwargs ): super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.type = 'FactoryGitHubConfiguration' + self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = kwargs.get('host_name', None) @@ -13295,7 +13976,7 @@ def __init__( **kwargs ): super(FactoryVstsConfiguration, self).__init__(**kwargs) - self.type = 'FactoryVSTSConfiguration' + self.type = 'FactoryVSTSConfiguration' # type: str self.project_name = kwargs['project_name'] self.tenant_id = kwargs.get('tenant_id', None) @@ -13355,7 +14036,7 @@ def __init__( **kwargs ): super(FileServerLinkedService, self).__init__(**kwargs) - self.type = 'FileServer' + self.type = 'FileServer' # type: str self.host = kwargs['host'] self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) @@ -13396,7 +14077,7 @@ def __init__( **kwargs ): super(FileServerLocation, self).__init__(**kwargs) - self.type = 'FileServerLocation' + self.type = 'FileServerLocation' # type: str class FileServerReadSettings(StoreReadSettings): @@ -13469,7 +14150,7 @@ def __init__( **kwargs ): super(FileServerReadSettings, self).__init__(**kwargs) - self.type = 'FileServerReadSettings' + self.type = 'FileServerReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -13515,7 +14196,7 @@ def __init__( **kwargs ): super(FileServerWriteSettings, self).__init__(**kwargs) - self.type = 'FileServerWriteSettings' + self.type = 'FileServerWriteSettings' # type: str class FileShareDataset(Dataset): @@ -13595,7 +14276,7 @@ def __init__( **kwargs ): super(FileShareDataset, self).__init__(**kwargs) - self.type = 'FileShare' + self.type = 'FileShare' # type: str self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) @@ -13654,7 +14335,7 @@ def __init__( **kwargs ): super(FileSystemSink, self).__init__(**kwargs) - self.type = 'FileSystemSink' + self.type = 'FileSystemSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) @@ -13704,7 +14385,7 @@ def __init__( **kwargs ): super(FileSystemSource, self).__init__(**kwargs) - self.type = 'FileSystemSource' + self.type = 
'FileSystemSource' # type: str self.recursive = kwargs.get('recursive', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -13756,7 +14437,7 @@ def __init__( **kwargs ): super(FilterActivity, self).__init__(**kwargs) - self.type = 'Filter' + self.type = 'Filter' # type: str self.items = kwargs['items'] self.condition = kwargs['condition'] @@ -13816,7 +14497,7 @@ def __init__( **kwargs ): super(ForEachActivity, self).__init__(**kwargs) - self.type = 'ForEach' + self.type = 'ForEach' # type: str self.is_sequential = kwargs.get('is_sequential', None) self.batch_count = kwargs.get('batch_count', None) self.items = kwargs['items'] @@ -13884,7 +14565,7 @@ def __init__( **kwargs ): super(FtpReadSettings, self).__init__(**kwargs) - self.type = 'FtpReadSettings' + self.type = 'FtpReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -13967,7 +14648,7 @@ def __init__( **kwargs ): super(FtpServerLinkedService, self).__init__(**kwargs) - self.type = 'FtpServer' + self.type = 'FtpServer' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -14012,7 +14693,7 @@ def __init__( **kwargs ): super(FtpServerLocation, self).__init__(**kwargs) - self.type = 'FtpServerLocation' + self.type = 'FtpServerLocation' # type: str class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): @@ -14097,7 +14778,7 @@ def __init__( **kwargs ): super(GetMetadataActivity, self).__init__(**kwargs) - self.type = 'GetMetadata' + self.type = 'GetMetadata' # type: str self.dataset = kwargs['dataset'] self.field_list = kwargs.get('field_list', None) self.store_settings = kwargs.get('store_settings', None) @@ -14296,7 +14977,7 @@ def __init__( **kwargs ): super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.type = 'GoogleAdWords' + self.type = 'GoogleAdWords' # type: str self.client_customer_id = kwargs['client_customer_id'] self.developer_token = kwargs['developer_token'] self.authentication_type = kwargs['authentication_type'] @@ -14364,7 +15045,7 @@ def __init__( **kwargs ): super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleAdWordsObject' + self.type = 'GoogleAdWordsObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -14418,7 +15099,7 @@ def __init__( **kwargs ): super(GoogleAdWordsSource, self).__init__(**kwargs) - self.type = 'GoogleAdWordsSource' + self.type = 'GoogleAdWordsSource' # type: str self.query = kwargs.get('query', None) @@ -14513,7 +15194,7 @@ def __init__( **kwargs ): super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.type = 'GoogleBigQuery' + self.type = 'GoogleBigQuery' # type: str self.project = kwargs['project'] self.additional_projects = kwargs.get('additional_projects', None) self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) @@ -14591,7 +15272,7 @@ def __init__( **kwargs ): super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleBigQueryObject' + self.type = 'GoogleBigQueryObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.dataset = kwargs.get('dataset', None) @@ -14647,7 +15328,7 @@ def __init__( **kwargs ): super(GoogleBigQuerySource, self).__init__(**kwargs) - self.type = 'GoogleBigQuerySource' + self.type = 'GoogleBigQuerySource' # 
type: str self.query = kwargs.get('query', None) @@ -14708,7 +15389,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'GoogleCloudStorage' + self.type = 'GoogleCloudStorage' # type: str self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) @@ -14757,7 +15438,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLocation, self).__init__(**kwargs) - self.type = 'GoogleCloudStorageLocation' + self.type = 'GoogleCloudStorageLocation' # type: str self.bucket_name = kwargs.get('bucket_name', None) self.version = kwargs.get('version', None) @@ -14832,7 +15513,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) - self.type = 'GoogleCloudStorageReadSettings' + self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -14895,7 +15576,7 @@ def __init__( **kwargs ): super(GreenplumLinkedService, self).__init__(**kwargs) - self.type = 'Greenplum' + self.type = 'Greenplum' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -14951,7 +15632,7 @@ def __init__( **kwargs ): super(GreenplumSource, self).__init__(**kwargs) - self.type = 'GreenplumSource' + self.type = 'GreenplumSource' # type: str self.query = kwargs.get('query', None) @@ -15017,7 +15698,7 @@ def __init__( **kwargs ): super(GreenplumTableDataset, self).__init__(**kwargs) - self.type = 'GreenplumTable' + self.type = 'GreenplumTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -15107,7 +15788,7 @@ def __init__( **kwargs ): super(HBaseLinkedService, self).__init__(**kwargs) - self.type = 'HBase' + self.type = 'HBase' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.http_path = kwargs.get('http_path', None) @@ -15175,7 +15856,7 @@ def __init__( **kwargs ): super(HBaseObjectDataset, self).__init__(**kwargs) - self.type = 'HBaseObject' + self.type = 'HBaseObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -15229,7 +15910,7 @@ def __init__( **kwargs ): super(HBaseSource, self).__init__(**kwargs) - self.type = 'HBaseSource' + self.type = 'HBaseSource' # type: str self.query = kwargs.get('query', None) @@ -15292,7 +15973,7 @@ def __init__( **kwargs ): super(HdfsLinkedService, self).__init__(**kwargs) - self.type = 'Hdfs' + self.type = 'Hdfs' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -15334,7 +16015,7 @@ def __init__( **kwargs ): super(HdfsLocation, self).__init__(**kwargs) - self.type = 'HdfsLocation' + self.type = 'HdfsLocation' # type: str class HdfsReadSettings(StoreReadSettings): @@ -15376,6 +16057,9 @@ class HdfsReadSettings(StoreReadSettings): :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. 
:type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object """ _validation = { @@ -15395,6 +16079,7 @@ class HdfsReadSettings(StoreReadSettings): 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, } def __init__( @@ -15402,7 +16087,7 @@ def __init__( **kwargs ): super(HdfsReadSettings, self).__init__(**kwargs) - self.type = 'HdfsReadSettings' + self.type = 'HdfsReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -15412,6 +16097,7 @@ def __init__( self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.distcp_settings = kwargs.get('distcp_settings', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) class HdfsSource(CopySource): @@ -15459,7 +16145,7 @@ def __init__( **kwargs ): super(HdfsSource, self).__init__(**kwargs) - self.type = 'HdfsSource' + self.type = 'HdfsSource' # type: str self.recursive = kwargs.get('recursive', None) self.distcp_settings = kwargs.get('distcp_settings', None) @@ -15536,7 +16222,7 @@ def __init__( **kwargs ): super(HdInsightHiveActivity, self).__init__(**kwargs) - self.type = 'HDInsightHive' + self.type = 'HDInsightHive' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -15618,7 +16304,7 @@ def __init__( **kwargs ): super(HdInsightLinkedService, self).__init__(**kwargs) - self.type = 'HDInsight' + self.type = 'HDInsight' # type: str self.cluster_uri = kwargs['cluster_uri'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) @@ -15702,7 +16388,7 @@ def __init__( **kwargs ): super(HdInsightMapReduceActivity, self).__init__(**kwargs) - self.type = 'HDInsightMapReduce' + self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -15892,7 +16578,7 @@ def __init__( **kwargs ): super(HdInsightOnDemandLinkedService, self).__init__(**kwargs) - self.type = 'HDInsightOnDemand' + self.type = 'HDInsightOnDemand' # type: str self.cluster_size = kwargs['cluster_size'] self.time_to_live = kwargs['time_to_live'] self.version = kwargs['version'] @@ -15994,7 +16680,7 @@ def __init__( **kwargs ): super(HdInsightPigActivity, self).__init__(**kwargs) - self.type = 'HDInsightPig' + self.type = 'HDInsightPig' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -16079,7 +16765,7 @@ def __init__( **kwargs ): super(HdInsightSparkActivity, self).__init__(**kwargs) - self.type = 'HDInsightSpark' + 
self.type = 'HDInsightSpark' # type: str self.root_path = kwargs['root_path'] self.entry_file_path = kwargs['entry_file_path'] self.arguments = kwargs.get('arguments', None) @@ -16180,7 +16866,7 @@ def __init__( **kwargs ): super(HdInsightStreamingActivity, self).__init__(**kwargs) - self.type = 'HDInsightStreaming' + self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -16303,7 +16989,7 @@ def __init__( **kwargs ): super(HiveLinkedService, self).__init__(**kwargs) - self.type = 'Hive' + self.type = 'Hive' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.server_type = kwargs.get('server_type', None) @@ -16385,7 +17071,7 @@ def __init__( **kwargs ): super(HiveObjectDataset, self).__init__(**kwargs) - self.type = 'HiveObject' + self.type = 'HiveObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -16441,7 +17127,7 @@ def __init__( **kwargs ): super(HiveSource, self).__init__(**kwargs) - self.type = 'HiveSource' + self.type = 'HiveSource' # type: str self.query = kwargs.get('query', None) @@ -16521,7 +17207,7 @@ def __init__( **kwargs ): super(HttpDataset, self).__init__(**kwargs) - self.type = 'HttpFile' + self.type = 'HttpFile' # type: str self.relative_url = kwargs.get('relative_url', None) self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) @@ -16606,7 +17292,7 @@ def __init__( **kwargs ): super(HttpLinkedService, self).__init__(**kwargs) - self.type = 'HttpServer' + self.type = 'HttpServer' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) @@ -16670,7 +17356,7 @@ def __init__( **kwargs ): super(HttpReadSettings, self).__init__(**kwargs) - self.type = 'HttpReadSettings' + self.type = 'HttpReadSettings' # type: str self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) self.additional_headers = kwargs.get('additional_headers', None) @@ -16717,7 +17403,7 @@ def __init__( **kwargs ): super(HttpServerLocation, self).__init__(**kwargs) - self.type = 'HttpServerLocation' + self.type = 'HttpServerLocation' # type: str self.relative_url = kwargs.get('relative_url', None) @@ -16765,7 +17451,7 @@ def __init__( **kwargs ): super(HttpSource, self).__init__(**kwargs) - self.type = 'HttpSource' + self.type = 'HttpSource' # type: str self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -16840,7 +17526,7 @@ def __init__( **kwargs ): super(HubspotLinkedService, self).__init__(**kwargs) - self.type = 'Hubspot' + self.type = 'Hubspot' # type: str self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) self.access_token = kwargs.get('access_token', None) @@ -16905,7 +17591,7 @@ def __init__( **kwargs ): super(HubspotObjectDataset, self).__init__(**kwargs) - self.type = 'HubspotObject' + self.type = 'HubspotObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -16959,7 +17645,7 @@ def __init__( **kwargs ): super(HubspotSource, self).__init__(**kwargs) - self.type = 'HubspotSource' + self.type = 'HubspotSource' # type: str self.query = kwargs.get('query', None) @@ -17016,7 +17702,7 @@ 
def __init__( **kwargs ): super(IfConditionActivity, self).__init__(**kwargs) - self.type = 'IfCondition' + self.type = 'IfCondition' # type: str self.expression = kwargs['expression'] self.if_true_activities = kwargs.get('if_true_activities', None) self.if_false_activities = kwargs.get('if_false_activities', None) @@ -17108,7 +17794,7 @@ def __init__( **kwargs ): super(ImpalaLinkedService, self).__init__(**kwargs) - self.type = 'Impala' + self.type = 'Impala' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs['authentication_type'] @@ -17185,7 +17871,7 @@ def __init__( **kwargs ): super(ImpalaObjectDataset, self).__init__(**kwargs) - self.type = 'ImpalaObject' + self.type = 'ImpalaObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -17241,47 +17927,10 @@ def __init__( **kwargs ): super(ImpalaSource, self).__init__(**kwargs) - self.type = 'ImpalaSource' + self.type = 'ImpalaSource' # type: str self.query = kwargs.get('query', None) -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ImportSettings' - - class InformixLinkedService(LinkedService): """Informix linked service. 
@@ -17347,7 +17996,7 @@ def __init__( **kwargs ): super(InformixLinkedService, self).__init__(**kwargs) - self.type = 'Informix' + self.type = 'Informix' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -17406,7 +18055,7 @@ def __init__( **kwargs ): super(InformixSink, self).__init__(**kwargs) - self.type = 'InformixSink' + self.type = 'InformixSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -17459,7 +18108,7 @@ def __init__( **kwargs ): super(InformixSource, self).__init__(**kwargs) - self.type = 'InformixSource' + self.type = 'InformixSource' # type: str self.query = kwargs.get('query', None) @@ -17518,7 +18167,7 @@ def __init__( **kwargs ): super(InformixTableDataset, self).__init__(**kwargs) - self.type = 'InformixTable' + self.type = 'InformixTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -17560,7 +18209,7 @@ def __init__( ): super(IntegrationRuntime, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'IntegrationRuntime' + self.type = 'IntegrationRuntime' # type: str self.description = kwargs.get('description', None) @@ -18207,7 +18856,7 @@ def __init__( ): super(IntegrationRuntimeStatus, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'IntegrationRuntimeStatus' + self.type = 'IntegrationRuntimeStatus' # type: str self.data_factory_name = None self.state = None @@ -18379,7 +19028,7 @@ def __init__( **kwargs ): super(JiraLinkedService, self).__init__(**kwargs) - self.type = 'Jira' + self.type = 'Jira' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.username = kwargs['username'] @@ -18444,7 +19093,7 @@ def __init__( **kwargs ): super(JiraObjectDataset, self).__init__(**kwargs) - self.type = 'JiraObject' + self.type = 'JiraObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -18498,7 +19147,7 @@ def __init__( **kwargs ): super(JiraSource, self).__init__(**kwargs) - self.type = 'JiraSource' + self.type = 'JiraSource' # type: str self.query = kwargs.get('query', None) @@ -18566,7 +19215,7 @@ def __init__( **kwargs ): super(JsonDataset, self).__init__(**kwargs) - self.type = 'Json' + self.type = 'Json' # type: str self.location = kwargs.get('location', None) self.encoding_name = kwargs.get('encoding_name', None) self.compression = kwargs.get('compression', None) @@ -18631,7 +19280,7 @@ def __init__( **kwargs ): super(JsonFormat, self).__init__(**kwargs) - self.type = 'JsonFormat' + self.type = 'JsonFormat' # type: str self.file_pattern = kwargs.get('file_pattern', None) self.nesting_separator = kwargs.get('nesting_separator', None) self.encoding_name = kwargs.get('encoding_name', None) @@ -18668,7 +19317,7 @@ def __init__( **kwargs ): super(JsonReadSettings, self).__init__(**kwargs) - self.type = 'JsonReadSettings' + self.type = 'JsonReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) @@ -18724,7 +19373,7 @@ def __init__( **kwargs ): super(JsonSink, self).__init__(**kwargs) - self.type = 'JsonSink' + self.type = 'JsonSink' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) @@ -18777,7 +19426,7 @@ def __init__( **kwargs ): super(JsonSource, self).__init__(**kwargs) - self.type = 'JsonSource' + self.type = 
'JsonSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -18814,7 +19463,7 @@ def __init__( **kwargs ): super(JsonWriteSettings, self).__init__(**kwargs) - self.type = 'JsonWriteSettings' + self.type = 'JsonWriteSettings' # type: str self.file_pattern = kwargs.get('file_pattern', None) @@ -18895,7 +19544,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None + self.authorization_type = None # type: Optional[str] class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): @@ -18925,7 +19574,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.authorization_type = 'Key' + self.authorization_type = 'Key' # type: str self.key = kwargs['key'] @@ -18956,7 +19605,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.authorization_type = 'RBAC' + self.authorization_type = 'RBAC' # type: str self.resource_id = kwargs['resource_id'] @@ -19122,8 +19771,74 @@ def __init__( self.properties = kwargs['properties'] +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. 
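Illustrative usage sketch for the new LogLocationSettings / LogSettings pair introduced in this hunk, which supersedes the now-deprecated LogStorageSettings shown further below. LinkedServiceReference is not part of this hunk, so its constructor arguments here are assumptions; all other keyword arguments come from the docstrings above.

from data_factory_management_client.models import (
    LinkedServiceReference,  # assumed signature; class not shown in this diff
    LogLocationSettings,
    LogSettings,
)

log_settings = LogSettings(
    enable_copy_activity_log=True,                  # optional
    log_location_settings=LogLocationSettings(      # required
        linked_service_name=LinkedServiceReference(
            type='LinkedServiceReference',          # assumed required constant
            reference_name='exampleStorageLinkedService',  # placeholder
        ),
        path='copy-activity-logs',                  # optional storage path
    ),
)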
+ :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs['log_location_settings'] + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -19135,6 +19850,12 @@ class LogStorageSettings(msrest.serialization.Model): :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object """ _validation = { @@ -19145,6 +19866,8 @@ class LogStorageSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, } def __init__( @@ -19155,6 +19878,8 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.linked_service_name = kwargs['linked_service_name'] self.path = kwargs.get('path', None) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) class LookupActivity(ExecutionActivity): @@ -19214,7 +19939,7 @@ def __init__( **kwargs ): super(LookupActivity, self).__init__(**kwargs) - self.type = 'Lookup' + self.type = 'Lookup' # type: str self.source = kwargs['source'] self.dataset = kwargs['dataset'] self.first_row_only = kwargs.get('first_row_only', None) @@ -19283,7 +20008,7 @@ def __init__( **kwargs ): super(MagentoLinkedService, self).__init__(**kwargs) - self.type = 'Magento' + self.type = 'Magento' # type: str self.host = kwargs['host'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -19346,7 +20071,7 @@ def __init__( **kwargs ): super(MagentoObjectDataset, self).__init__(**kwargs) - self.type = 'MagentoObject' + self.type = 'MagentoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -19400,7 +20125,7 @@ def __init__( **kwargs ): super(MagentoSource, self).__init__(**kwargs) - self.type = 'MagentoSource' + self.type = 'MagentoSource' # type: str self.query = kwargs.get('query', None) @@ -19449,7 +20174,7 @@ def __init__( **kwargs ): super(ManagedIntegrationRuntime, self).__init__(**kwargs) - self.type = 'Managed' + self.type = 'Managed' # 
type: str self.state = None self.compute_properties = kwargs.get('compute_properties', None) self.ssis_properties = kwargs.get('ssis_properties', None) @@ -19653,13 +20378,264 @@ def __init__( **kwargs ): super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.type = 'Managed' + self.type = 'Managed' # type: str self.create_time = None self.nodes = None self.other_errors = None self.last_operation = None +class ManagedPrivateEndpoint(msrest.serialization.Model): + """Properties of a managed private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :ivar is_reserved: Denotes whether the managed private endpoint is reserved. + :vartype is_reserved: bool + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :type private_link_resource_id: str + :ivar provisioning_state: The managed private endpoint provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'is_reserved': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connection_state': {'key': 'connectionState', 'type': 'ConnectionStateProperties'}, + 'fqdns': {'key': 'fqdns', 'type': '[str]'}, + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'is_reserved': {'key': 'isReserved', 'type': 'bool'}, + 'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedPrivateEndpoint, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.connection_state = kwargs.get('connection_state', None) + self.fqdns = kwargs.get('fqdns', None) + self.group_id = kwargs.get('group_id', None) + self.is_reserved = None + self.private_link_resource_id = kwargs.get('private_link_resource_id', None) + self.provisioning_state = None + + +class ManagedPrivateEndpointListResponse(msrest.serialization.Model): + """A list of managed private endpoint resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of managed private endpoints. + :type value: list[~data_factory_management_client.models.ManagedPrivateEndpointResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ManagedPrivateEndpointResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class ManagedPrivateEndpointResource(SubResource): + """Managed private endpoint resource type. 
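Illustrative usage sketch for the ManagedPrivateEndpoint model added above: only group_id, private_link_resource_id, fqdns and connection_state are client-settable, while is_reserved and provisioning_state are read-only and stay None until the service populates them. The target resource ID below is a placeholder.

from data_factory_management_client.models import ManagedPrivateEndpoint

endpoint = ManagedPrivateEndpoint(
    group_id='blob',
    private_link_resource_id=(
        '/subscriptions/00000000-0000-0000-0000-000000000000'
        '/resourceGroups/exampleResourceGroup'
        '/providers/Microsoft.Storage/storageAccounts/examplestorage'
    ),
)
assert endpoint.is_reserved is None          # server-populated (readonly)
assert endpoint.provisioning_state is None   # server-populated (readonly)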
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :ivar is_reserved: Denotes whether the managed private endpoint is reserved. + :vartype is_reserved: bool + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :type private_link_resource_id: str + :ivar provisioning_state: The managed private endpoint provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'is_reserved': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, + 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedPrivateEndpointResource, self).__init__(**kwargs) + self.connection_state = kwargs.get('connection_state', None) + self.fqdns = kwargs.get('fqdns', None) + self.group_id = kwargs.get('group_id', None) + self.is_reserved = None + self.private_link_resource_id = kwargs.get('private_link_resource_id', None) + self.provisioning_state = None + + +class ManagedVirtualNetwork(msrest.serialization.Model): + """A managed Virtual Network associated with the Azure Data Factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar v_net_id: Managed Virtual Network ID. + :vartype v_net_id: str + :ivar alias: Managed Virtual Network alias. + :vartype alias: str + """ + + _validation = { + 'v_net_id': {'readonly': True}, + 'alias': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'alias': {'key': 'alias', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetwork, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = None + self.alias = None + + +class ManagedVirtualNetworkListResponse(msrest.serialization.Model): + """A list of managed Virtual Network resources. + + All required parameters must be populated in order to send to Azure. 
+ + :param value: Required. List of managed Virtual Networks. + :type value: list[~data_factory_management_client.models.ManagedVirtualNetworkResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ManagedVirtualNetworkResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class ManagedVirtualNetworkResource(SubResource): + """Managed Virtual Network resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Managed Virtual Network properties. + :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedVirtualNetwork'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetworkResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + class MappingDataFlow(DataFlow): """Mapping data flow. 
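Illustrative usage sketch for the ManagedVirtualNetwork models added above: the resource wrapper requires a properties payload, whose v_net_id and alias are server-populated and therefore left unset by the client.

from data_factory_management_client.models import (
    ManagedVirtualNetwork,
    ManagedVirtualNetworkResource,
)

resource = ManagedVirtualNetworkResource(
    properties=ManagedVirtualNetwork(),   # required; vNetId/alias are readonly
)
assert resource.properties.v_net_id is None   # filled in by the service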
@@ -19698,7 +20674,7 @@ def __init__( **kwargs ): super(MappingDataFlow, self).__init__(**kwargs) - self.type = 'MappingDataFlow' + self.type = 'MappingDataFlow' # type: str self.sources = kwargs.get('sources', None) self.sinks = kwargs.get('sinks', None) self.transformations = kwargs.get('transformations', None) @@ -19755,7 +20731,7 @@ def __init__( **kwargs ): super(MariaDBLinkedService, self).__init__(**kwargs) - self.type = 'MariaDB' + self.type = 'MariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -19811,7 +20787,7 @@ def __init__( **kwargs ): super(MariaDBSource, self).__init__(**kwargs) - self.type = 'MariaDBSource' + self.type = 'MariaDBSource' # type: str self.query = kwargs.get('query', None) @@ -19869,7 +20845,7 @@ def __init__( **kwargs ): super(MariaDBTableDataset, self).__init__(**kwargs) - self.type = 'MariaDBTable' + self.type = 'MariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -19940,7 +20916,7 @@ def __init__( **kwargs ): super(MarketoLinkedService, self).__init__(**kwargs) - self.type = 'Marketo' + self.type = 'Marketo' # type: str self.endpoint = kwargs['endpoint'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -20004,7 +20980,7 @@ def __init__( **kwargs ): super(MarketoObjectDataset, self).__init__(**kwargs) - self.type = 'MarketoObject' + self.type = 'MarketoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -20058,7 +21034,7 @@ def __init__( **kwargs ): super(MarketoSource, self).__init__(**kwargs) - self.type = 'MarketoSource' + self.type = 'MarketoSource' # type: str self.query = kwargs.get('query', None) @@ -20127,7 +21103,7 @@ def __init__( **kwargs ): super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' + self.type = 'MicrosoftAccess' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -20186,7 +21162,7 @@ def __init__( **kwargs ): super(MicrosoftAccessSink, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSink' + self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -20235,7 +21211,7 @@ def __init__( **kwargs ): super(MicrosoftAccessSource, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSource' + self.type = 'MicrosoftAccessSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -20267,14 +21243,74 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MongoDBAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -20287,16 +21323,138 @@ class MicrosoftAccessTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' - self.table_name = kwargs.get('table_name', None) + super(MongoDBAtlasCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = kwargs['collection'] + + +class MongoDBAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDBAtlasLinkedService, self).__init__(**kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] + + +class MongoDBAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDBAtlasSource, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class MongoDBCollectionDataset(Dataset): @@ -20355,7 +21513,7 @@ def __init__( **kwargs ): super(MongoDBCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbCollection' + self.type = 'MongoDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -20483,7 +21641,7 @@ def __init__( **kwargs ): super(MongoDBLinkedService, self).__init__(**kwargs) - self.type = 'MongoDb' + self.type = 'MongoDb' # type: str self.server = kwargs['server'] self.authentication_type = kwargs.get('authentication_type', None) self.database_name = 
kwargs['database_name'] @@ -20542,7 +21700,7 @@ def __init__( **kwargs ): super(MongoDBSource, self).__init__(**kwargs) - self.type = 'MongoDbSource' + self.type = 'MongoDbSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -20603,7 +21761,7 @@ def __init__( **kwargs ): super(MongoDBV2CollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbV2Collection' + self.type = 'MongoDbV2Collection' # type: str self.collection = kwargs['collection'] @@ -20655,7 +21813,7 @@ def __init__( **kwargs ): super(MongoDBV2LinkedService, self).__init__(**kwargs) - self.type = 'MongoDbV2' + self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -20720,7 +21878,7 @@ def __init__( **kwargs ): super(MongoDBV2Source, self).__init__(**kwargs) - self.type = 'MongoDbV2Source' + self.type = 'MongoDbV2Source' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) @@ -20728,7 +21886,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class MySqlLinkedService(LinkedService): +class MySQLLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -20777,14 +21935,14 @@ def __init__( self, **kwargs ): - super(MySqlLinkedService, self).__init__(**kwargs) - self.type = 'MySql' + super(MySQLLinkedService, self).__init__(**kwargs) + self.type = 'MySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MySqlSource(TabularSource): +class MySQLSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -20832,12 +21990,12 @@ def __init__( self, **kwargs ): - super(MySqlSource, self).__init__(**kwargs) - self.type = 'MySqlSource' + super(MySQLSource, self).__init__(**kwargs) + self.type = 'MySqlSource' # type: str self.query = kwargs.get('query', None) -class MySqlTableDataset(Dataset): +class MySQLTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. 
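Illustrative usage sketch wiring together the MongoDB Atlas models added earlier in this hunk (linked service, collection dataset, copy source). LinkedServiceReference is not shown in this diff, so its arguments are assumptions; connection values are placeholders.

from data_factory_management_client.models import (
    LinkedServiceReference,          # assumed; not part of this hunk
    MongoDBAtlasCollectionDataset,
    MongoDBAtlasLinkedService,
    MongoDBAtlasSource,
)

linked_service = MongoDBAtlasLinkedService(
    connection_string='mongodb+srv://user:pass@example.mongodb.net',  # placeholder
    database='exampleDatabase',
)

dataset = MongoDBAtlasCollectionDataset(
    linked_service_name=LinkedServiceReference(
        type='LinkedServiceReference',                 # assumed required constant
        reference_name='exampleMongoDbAtlasLinkedService',
    ),
    collection='exampleCollection',                    # required
)

source = MongoDBAtlasSource(filter='{}', batch_size=100)  # all source knobs optional
assert source.type == 'MongoDbAtlasSource'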
@@ -20890,8 +22048,8 @@ def __init__( self, **kwargs ): - super(MySqlTableDataset, self).__init__(**kwargs) - self.type = 'MySqlTable' + super(MySQLTableDataset, self).__init__(**kwargs) + self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -20945,7 +22103,7 @@ def __init__( **kwargs ): super(NetezzaLinkedService, self).__init__(**kwargs) - self.type = 'Netezza' + self.type = 'Netezza' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -21040,7 +22198,7 @@ def __init__( **kwargs ): super(NetezzaSource, self).__init__(**kwargs) - self.type = 'NetezzaSource' + self.type = 'NetezzaSource' # type: str self.query = kwargs.get('query', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) @@ -21109,7 +22267,7 @@ def __init__( **kwargs ): super(NetezzaTableDataset, self).__init__(**kwargs) - self.type = 'NetezzaTable' + self.type = 'NetezzaTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -21152,13 +22310,17 @@ class ODataLinkedService(LinkedService): :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_id: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). :type aad_resource_id: object :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
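Illustrative usage sketch for the new azure_cloud_type property on ODataLinkedService (the same property is added to RestServiceLinkedService later in this hunk): per the docstring it accepts AzurePublic, AzureChina, AzureUsGovernment or AzureGermany and defaults to the factory region's cloud. The URL below is only an example endpoint.

from data_factory_management_client.models import ODataLinkedService

odata = ODataLinkedService(
    url='https://services.odata.org/V3/OData/OData.svc',  # required
    azure_cloud_type='AzurePublic',                        # new optional property
)
assert odata.type == 'OData'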
:type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -21195,6 +22357,7 @@ class ODataLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, @@ -21208,13 +22371,14 @@ def __init__( **kwargs ): super(ODataLinkedService, self).__init__(**kwargs) - self.type = 'OData' + self.type = 'OData' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.tenant = kwargs.get('tenant', None) self.service_principal_id = kwargs.get('service_principal_id', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) self.service_principal_key = kwargs.get('service_principal_key', None) @@ -21277,7 +22441,7 @@ def __init__( **kwargs ): super(ODataResourceDataset, self).__init__(**kwargs) - self.type = 'ODataResource' + self.type = 'ODataResource' # type: str self.path = kwargs.get('path', None) @@ -21333,7 +22497,7 @@ def __init__( **kwargs ): super(ODataSource, self).__init__(**kwargs) - self.type = 'ODataSource' + self.type = 'ODataSource' # type: str self.query = kwargs.get('query', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -21403,7 +22567,7 @@ def __init__( **kwargs ): super(OdbcLinkedService, self).__init__(**kwargs) - self.type = 'Odbc' + self.type = 'Odbc' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -21462,7 +22626,7 @@ def __init__( **kwargs ): super(OdbcSink, self).__init__(**kwargs) - self.type = 'OdbcSink' + self.type = 'OdbcSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -21515,7 +22679,7 @@ def __init__( **kwargs ): super(OdbcSource, self).__init__(**kwargs) - self.type = 'OdbcSource' + self.type = 'OdbcSource' # type: str self.query = kwargs.get('query', None) @@ -21573,7 +22737,7 @@ def __init__( **kwargs ): super(OdbcTableDataset, self).__init__(**kwargs) - self.type = 'OdbcTable' + self.type = 'OdbcTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -21637,7 +22801,7 @@ def __init__( **kwargs ): super(Office365Dataset, self).__init__(**kwargs) - self.type = 'Office365Table' + self.type = 'Office365Table' # type: str self.table_name = kwargs['table_name'] self.predicate = kwargs.get('predicate', None) @@ -21704,7 +22868,7 @@ def __init__( **kwargs ): super(Office365LinkedService, self).__init__(**kwargs) - self.type = 'Office365' + self.type = 'Office365' # type: str self.office365_tenant_id = kwargs['office365_tenant_id'] self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] 
self.service_principal_id = kwargs['service_principal_id'] @@ -21775,7 +22939,7 @@ def __init__( **kwargs ): super(Office365Source, self).__init__(**kwargs) - self.type = 'Office365Source' + self.type = 'Office365Source' # type: str self.allowed_groups = kwargs.get('allowed_groups', None) self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) self.date_filter_column = kwargs.get('date_filter_column', None) @@ -22078,7 +23242,7 @@ def __init__( **kwargs ): super(OracleLinkedService, self).__init__(**kwargs) - self.type = 'Oracle' + self.type = 'Oracle' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -22190,7 +23354,7 @@ def __init__( **kwargs ): super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'OracleServiceCloud' + self.type = 'OracleServiceCloud' # type: str self.host = kwargs['host'] self.username = kwargs['username'] self.password = kwargs['password'] @@ -22254,7 +23418,7 @@ def __init__( **kwargs ): super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'OracleServiceCloudObject' + self.type = 'OracleServiceCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -22308,7 +23472,7 @@ def __init__( **kwargs ): super(OracleServiceCloudSource, self).__init__(**kwargs) - self.type = 'OracleServiceCloudSource' + self.type = 'OracleServiceCloudSource' # type: str self.query = kwargs.get('query', None) @@ -22362,7 +23526,7 @@ def __init__( **kwargs ): super(OracleSink, self).__init__(**kwargs) - self.type = 'OracleSink' + self.type = 'OracleSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -22423,7 +23587,7 @@ def __init__( **kwargs ): super(OracleSource, self).__init__(**kwargs) - self.type = 'OracleSource' + self.type = 'OracleSource' # type: str self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) self.partition_option = kwargs.get('partition_option', None) @@ -22494,7 +23658,7 @@ def __init__( **kwargs ): super(OracleTableDataset, self).__init__(**kwargs) - self.type = 'OracleTable' + self.type = 'OracleTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -22529,7 +23693,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -22557,7 +23721,7 @@ def __init__( **kwargs ): super(OrcDataset, self).__init__(**kwargs) - self.type = 'Orc' + self.type = 'Orc' # type: str self.location = kwargs.get('location', None) self.orc_compression_codec = kwargs.get('orc_compression_codec', None) @@ -22594,7 +23758,7 @@ def __init__( **kwargs ): super(OrcFormat, self).__init__(**kwargs) - self.type = 'OrcFormat' + self.type = 'OrcFormat' # type: str class OrcSink(CopySink): @@ -22624,6 +23788,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -22639,6 +23805,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -22646,8 +23813,9 @@ def __init__( **kwargs ): super(OrcSink, self).__init__(**kwargs) - self.type = 'OrcSink' + self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class OrcSource(CopySource): @@ -22695,11 +23863,51 @@ def __init__( **kwargs ): super(OrcSource, self).__init__(**kwargs) - self.type = 'OrcSource' + self.type = 'OrcSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.additional_columns = kwargs.get('additional_columns', None) +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OrcWriteSettings, self).__init__(**kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -22790,7 +23998,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". 
:type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -22818,7 +24026,7 @@ def __init__( **kwargs ): super(ParquetDataset, self).__init__(**kwargs) - self.type = 'Parquet' + self.type = 'Parquet' # type: str self.location = kwargs.get('location', None) self.compression_codec = kwargs.get('compression_codec', None) @@ -22855,7 +24063,7 @@ def __init__( **kwargs ): super(ParquetFormat, self).__init__(**kwargs) - self.type = 'ParquetFormat' + self.type = 'ParquetFormat' # type: str class ParquetSink(CopySink): @@ -22885,6 +24093,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -22900,6 +24110,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -22907,8 +24118,9 @@ def __init__( **kwargs ): super(ParquetSink, self).__init__(**kwargs) - self.type = 'ParquetSink' + self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class ParquetSource(CopySource): @@ -22956,11 +24168,51 @@ def __init__( **kwargs ): super(ParquetSource, self).__init__(**kwargs) - self.type = 'ParquetSource' + self.type = 'ParquetSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.additional_columns = kwargs.get('additional_columns', None) +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. 
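Illustrative usage sketch for the new format_settings property on ParquetSink, backed by the ParquetWriteSettings model above; OrcSink and OrcWriteSettings, also added in this hunk, follow exactly the same shape. Store settings are omitted here for brevity.

from data_factory_management_client.models import ParquetSink, ParquetWriteSettings

sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,          # split output once a file reaches 1M rows
        file_name_prefix='examplePrefix',   # used when copying from non-file stores
    ),
)
assert sink.type == 'ParquetSink'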
@@ -23028,7 +24280,7 @@ def __init__( **kwargs ): super(PaypalLinkedService, self).__init__(**kwargs) - self.type = 'Paypal' + self.type = 'Paypal' # type: str self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -23092,7 +24344,7 @@ def __init__( **kwargs ): super(PaypalObjectDataset, self).__init__(**kwargs) - self.type = 'PaypalObject' + self.type = 'PaypalObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -23146,7 +24398,7 @@ def __init__( **kwargs ): super(PaypalSource, self).__init__(**kwargs) - self.type = 'PaypalSource' + self.type = 'PaypalSource' # type: str self.query = kwargs.get('query', None) @@ -23241,7 +24493,7 @@ def __init__( **kwargs ): super(PhoenixLinkedService, self).__init__(**kwargs) - self.type = 'Phoenix' + self.type = 'Phoenix' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.http_path = kwargs.get('http_path', None) @@ -23319,7 +24571,7 @@ def __init__( **kwargs ): super(PhoenixObjectDataset, self).__init__(**kwargs) - self.type = 'PhoenixObject' + self.type = 'PhoenixObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -23375,7 +24627,7 @@ def __init__( **kwargs ): super(PhoenixSource, self).__init__(**kwargs) - self.type = 'PhoenixSource' + self.type = 'PhoenixSource' # type: str self.query = kwargs.get('query', None) @@ -23475,8 +24727,8 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_properties_folder_name: The name of the folder that this Pipeline is in. - :type name_properties_folder_name: str + :param name_folder_name: The name of the folder that this Pipeline is in. + :type name_folder_name: str """ _validation = { @@ -23500,7 +24752,7 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_properties_folder_name': {'key': 'properties.folder.name', 'type': 'str'}, + 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, } def __init__( @@ -23516,7 +24768,7 @@ def __init__( self.concurrency = kwargs.get('concurrency', None) self.annotations = kwargs.get('annotations', None) self.run_dimensions = kwargs.get('run_dimensions', None) - self.name_properties_folder_name = kwargs.get('name_properties_folder_name', None) + self.name_folder_name = kwargs.get('name_folder_name', None) class PipelineRun(msrest.serialization.Model): @@ -23716,7 +24968,7 @@ def __init__( self.use_type_default = kwargs.get('use_type_default', None) -class PostgreSqlLinkedService(LinkedService): +class PostgreSQLLinkedService(LinkedService): """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. 
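Illustrative usage sketch for the class renames in this hunk: the PostgreSql* models become PostgreSQL* (mirroring the MySql* to MySQL* rename above), while the JSON discriminator strings such as 'PostgreSql' are unchanged, so only Python imports need updating.

from data_factory_management_client.models import PostgreSQLLinkedService  # was PostgreSqlLinkedService

pg = PostgreSQLLinkedService(
    connection_string='host=example.postgres.database.azure.com;database=exampledb',  # placeholder
)
assert pg.type == 'PostgreSql'   # discriminator string is unchanged by the rename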
@@ -23765,14 +25017,14 @@ def __init__( self, **kwargs ): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'PostgreSql' + super(PostgreSQLLinkedService, self).__init__(**kwargs) + self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PostgreSqlSource(TabularSource): +class PostgreSQLSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -23820,12 +25072,12 @@ def __init__( self, **kwargs ): - super(PostgreSqlSource, self).__init__(**kwargs) - self.type = 'PostgreSqlSource' + super(PostgreSQLSource, self).__init__(**kwargs) + self.type = 'PostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class PostgreSqlTableDataset(Dataset): +class PostgreSQLTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. @@ -23886,8 +25138,8 @@ def __init__( self, **kwargs ): - super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'PostgreSqlTable' + super(PostgreSQLTableDataset, self).__init__(**kwargs) + self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -23990,7 +25242,7 @@ def __init__( **kwargs ): super(PrestoLinkedService, self).__init__(**kwargs) - self.type = 'Presto' + self.type = 'Presto' # type: str self.host = kwargs['host'] self.server_version = kwargs['server_version'] self.catalog = kwargs['catalog'] @@ -24070,7 +25322,7 @@ def __init__( **kwargs ): super(PrestoObjectDataset, self).__init__(**kwargs) - self.type = 'PrestoObject' + self.type = 'PrestoObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -24126,7 +25378,7 @@ def __init__( **kwargs ): super(PrestoSource, self).__init__(**kwargs) - self.type = 'PrestoSource' + self.type = 'PrestoSource' # type: str self.query = kwargs.get('query', None) @@ -24171,18 +25423,20 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com). + :param connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object + :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to authorize. + :param company_id: The company ID of the QuickBooks company to authorize. :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. + :param consumer_key: The consumer key for OAuth 1.0 authentication. :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. + :param consumer_secret: The consumer secret for OAuth 1.0 authentication. 
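Illustrative usage sketch for the QuickBooksLinkedService changes documented above: the individual endpoint/credential properties lose their Required flags (see the validation changes just below) and a new connection_properties object is added, mutually exclusive with them. The dictionary key names in the second form are assumptions, not taken from this diff.

from data_factory_management_client.models import QuickBooksLinkedService

# Old style still works, but every property is now optional at the model level:
qb = QuickBooksLinkedService(
    endpoint='quickbooks.api.intuit.com',
    company_id='exampleCompanyId',           # placeholder
)

# New style: a single opaque connection_properties object (mutually exclusive
# with the individual properties). The key names below are assumptions.
qb_alt = QuickBooksLinkedService(
    connection_properties={
        'endpoint': 'quickbooks.api.intuit.com',
        'companyId': 'exampleCompanyId',
    },
)
assert qb.type == qb_alt.type == 'QuickBooks'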
:type consumer_secret: ~data_factory_management_client.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 authentication. + :param access_token: The access token for OAuth 1.0 authentication. :type access_token: ~data_factory_management_client.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. + :param access_token_secret: The access token secret for OAuth 1.0 authentication. :type access_token_secret: ~data_factory_management_client.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. @@ -24195,12 +25449,6 @@ class QuickBooksLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, } _attribute_map = { @@ -24210,6 +25458,7 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, @@ -24225,13 +25474,14 @@ def __init__( **kwargs ): super(QuickBooksLinkedService, self).__init__(**kwargs) - self.type = 'QuickBooks' - self.endpoint = kwargs['endpoint'] - self.company_id = kwargs['company_id'] - self.consumer_key = kwargs['consumer_key'] - self.consumer_secret = kwargs['consumer_secret'] - self.access_token = kwargs['access_token'] - self.access_token_secret = kwargs['access_token_secret'] + self.type = 'QuickBooks' # type: str + self.connection_properties = kwargs.get('connection_properties', None) + self.endpoint = kwargs.get('endpoint', None) + self.company_id = kwargs.get('company_id', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.consumer_secret = kwargs.get('consumer_secret', None) + self.access_token = kwargs.get('access_token', None) + self.access_token_secret = kwargs.get('access_token_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -24290,7 +25540,7 @@ def __init__( **kwargs ): super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.type = 'QuickBooksObject' + self.type = 'QuickBooksObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -24344,7 +25594,7 @@ def __init__( **kwargs ): super(QuickBooksSource, self).__init__(**kwargs) - self.type = 'QuickBooksSource' + self.type = 'QuickBooksSource' # type: str self.query = kwargs.get('query', None) @@ -24534,7 +25784,7 @@ def __init__( **kwargs ): super(RelationalSource, self).__init__(**kwargs) - self.type = 'RelationalSource' + self.type = 'RelationalSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -24594,7 +25844,7 @@ def __init__( **kwargs ): super(RelationalTableDataset, self).__init__(**kwargs) - self.type = 'RelationalTable' + self.type = 'RelationalTable' # type: str self.table_name = 
kwargs.get('table_name', None) @@ -24656,7 +25906,7 @@ def __init__( **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.type = 'RerunTumblingWindowTrigger' + self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = kwargs['parent_trigger'] self.requested_start_time = kwargs['requested_start_time'] self.requested_end_time = kwargs['requested_end_time'] @@ -24733,7 +25983,7 @@ def __init__( **kwargs ): super(ResponsysLinkedService, self).__init__(**kwargs) - self.type = 'Responsys' + self.type = 'Responsys' # type: str self.endpoint = kwargs['endpoint'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -24797,7 +26047,7 @@ def __init__( **kwargs ): super(ResponsysObjectDataset, self).__init__(**kwargs) - self.type = 'ResponsysObject' + self.type = 'ResponsysObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -24851,7 +26101,7 @@ def __init__( **kwargs ): super(ResponsysSource, self).__init__(**kwargs) - self.type = 'ResponsysSource' + self.type = 'ResponsysSource' # type: str self.query = kwargs.get('query', None) @@ -24926,7 +26176,7 @@ def __init__( **kwargs ): super(RestResourceDataset, self).__init__(**kwargs) - self.type = 'RestResource' + self.type = 'RestResource' # type: str self.relative_url = kwargs.get('relative_url', None) self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) @@ -24976,6 +26226,10 @@ class RestServiceLinkedService(LinkedService): :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: The resource you are requesting authorization to use. :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are @@ -25005,6 +26259,7 @@ class RestServiceLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -25014,7 +26269,7 @@ def __init__( **kwargs ): super(RestServiceLinkedService, self).__init__(**kwargs) - self.type = 'RestService' + self.type = 'RestService' # type: str self.url = kwargs['url'] self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) self.authentication_type = kwargs['authentication_type'] @@ -25023,10 +26278,86 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) +class RestSink(CopySink): + """A copy activity Rest service Sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: + string (or Expression with resultType string). + :type request_method: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next request, in milliseconds. 
+ :type request_interval: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RestSink, self).__init__(**kwargs) + self.type = 'RestSink' # type: str + self.request_method = kwargs.get('request_method', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.http_compression_type = kwargs.get('http_compression_type', None) + + class RestSource(CopySource): """A copy activity Rest service source. @@ -25094,7 +26425,7 @@ def __init__( **kwargs ): super(RestSource, self).__init__(**kwargs) - self.type = 'RestSource' + self.type = 'RestSource' # type: str self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) self.additional_headers = kwargs.get('additional_headers', None) @@ -25316,7 +26647,7 @@ def __init__( **kwargs ): super(SalesforceLinkedService, self).__init__(**kwargs) - self.type = 'Salesforce' + self.type = 'Salesforce' # type: str self.environment_url = kwargs.get('environment_url', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -25343,8 +26674,11 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). + :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object + :param client_id: The client ID associated with the Salesforce Marketing Cloud application. + Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). 
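Since the diff only shows the generated model, here is a minimal usage sketch for the newly added RestSink. The import path is an assumption inferred from the docstring cross-references (~data_factory_management_client.models.*), and every property value below is illustrative rather than taken from the change itself.

```python
# Minimal sketch for the new kwargs-based RestSink model.
# Assumption: the models are exposed under data_factory_management_client.models.
from data_factory_management_client.models import RestSink

rest_sink = RestSink(
    request_method="POST",                            # documented default method
    additional_headers="Content-Type: application/json",
    http_request_timeout="00:01:40",                  # documented default timeout
    request_interval=1000,                            # milliseconds between requests
    http_compression_type="Gzip",                     # only supported compression option
    write_batch_size=10000,                           # inherited CopySink property
)

assert rest_sink.type == "RestSink"                   # constant filled in by __init__
```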
@@ -25368,7 +26702,6 @@ class SalesforceMarketingCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -25378,6 +26711,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -25391,8 +26725,9 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloud' - self.client_id = kwargs['client_id'] + self.type = 'SalesforceMarketingCloud' # type: str + self.connection_properties = kwargs.get('connection_properties', None) + self.client_id = kwargs.get('client_id', None) self.client_secret = kwargs.get('client_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.use_host_verification = kwargs.get('use_host_verification', None) @@ -25454,7 +26789,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudObject' + self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -25508,7 +26843,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudSource, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudSource' + self.type = 'SalesforceMarketingCloudSource' # type: str self.query = kwargs.get('query', None) @@ -25567,7 +26902,7 @@ def __init__( **kwargs ): super(SalesforceObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceObject' + self.type = 'SalesforceObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -25638,7 +26973,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloud' + self.type = 'SalesforceServiceCloud' # type: str self.environment_url = kwargs.get('environment_url', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -25703,7 +27038,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudObject' + self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -25769,7 +27104,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudSink, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudSink' + self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) self.external_id_field_name = kwargs.get('external_id_field_name', None) self.ignore_null_values = kwargs.get('ignore_null_values', None) @@ -25824,7 +27159,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudSource, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudSource' + self.type = 'SalesforceServiceCloudSource' # type: str self.query = kwargs.get('query', None) self.read_behavior = kwargs.get('read_behavior', None) self.additional_columns = kwargs.get('additional_columns', None) @@ 
-25892,7 +27227,7 @@ def __init__( **kwargs ): super(SalesforceSink, self).__init__(**kwargs) - self.type = 'SalesforceSink' + self.type = 'SalesforceSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) self.external_id_field_name = kwargs.get('external_id_field_name', None) self.ignore_null_values = kwargs.get('ignore_null_values', None) @@ -25951,7 +27286,7 @@ def __init__( **kwargs ): super(SalesforceSource, self).__init__(**kwargs) - self.type = 'SalesforceSource' + self.type = 'SalesforceSource' # type: str self.query = kwargs.get('query', None) self.read_behavior = kwargs.get('read_behavior', None) @@ -26007,7 +27342,7 @@ def __init__( **kwargs ): super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' + self.type = 'SapBwCube' # type: str class SapBwLinkedService(LinkedService): @@ -26075,7 +27410,7 @@ def __init__( **kwargs ): super(SapBwLinkedService, self).__init__(**kwargs) - self.type = 'SapBW' + self.type = 'SapBW' # type: str self.server = kwargs['server'] self.system_number = kwargs['system_number'] self.client_id = kwargs['client_id'] @@ -26133,7 +27468,7 @@ def __init__( **kwargs ): super(SapBwSource, self).__init__(**kwargs) - self.type = 'SapBwSource' + self.type = 'SapBwSource' # type: str self.query = kwargs.get('query', None) @@ -26193,7 +27528,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.type = 'SapCloudForCustomer' + self.type = 'SapCloudForCustomer' # type: str self.url = kwargs['url'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -26256,7 +27591,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerResource' + self.type = 'SapCloudForCustomerResource' # type: str self.path = kwargs['path'] @@ -26317,7 +27652,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerSink, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerSink' + self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -26378,7 +27713,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerSource, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerSource' + self.type = 'SapCloudForCustomerSource' # type: str self.query = kwargs.get('query', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -26439,7 +27774,7 @@ def __init__( **kwargs ): super(SapEccLinkedService, self).__init__(**kwargs) - self.type = 'SapEcc' + self.type = 'SapEcc' # type: str self.url = kwargs['url'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -26502,7 +27837,7 @@ def __init__( **kwargs ): super(SapEccResourceDataset, self).__init__(**kwargs) - self.type = 'SapEccResource' + self.type = 'SapEccResource' # type: str self.path = kwargs['path'] @@ -26562,7 +27897,7 @@ def __init__( **kwargs ): super(SapEccSource, self).__init__(**kwargs) - self.type = 'SapEccSource' + self.type = 'SapEccSource' # type: str self.query = kwargs.get('query', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -26630,7 +27965,7 @@ def __init__( **kwargs ): super(SapHanaLinkedService, self).__init__(**kwargs) - self.type = 'SapHana' + self.type = 'SapHana' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = 
kwargs.get('authentication_type', None) @@ -26720,7 +28055,7 @@ def __init__( **kwargs ): super(SapHanaSource, self).__init__(**kwargs) - self.type = 'SapHanaSource' + self.type = 'SapHanaSource' # type: str self.query = kwargs.get('query', None) self.packet_size = kwargs.get('packet_size', None) self.partition_option = kwargs.get('partition_option', None) @@ -26785,7 +28120,7 @@ def __init__( **kwargs ): super(SapHanaTableDataset, self).__init__(**kwargs) - self.type = 'SapHanaTable' + self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -26808,26 +28143,38 @@ class SapOpenHubLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance where the open hub destination is - located. Type: string (or Expression with resultType string). + :param server: Host name of the SAP BW instance where the open hub destination is located. + Type: string (or Expression with resultType string). :type server: object - :param system_number: Required. System number of the BW system where the open hub destination - is located. (Usually a two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). + :param system_number: System number of the BW system where the open hub destination is located. + (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with + resultType string). :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where the open hub - destination is located. (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). + :param client_id: Client ID of the client on the BW system where the open hub destination is + located. (Usually a three-digit decimal number represented as a string) Type: string (or + Expression with resultType string). :type client_id: object :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object :param user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~data_factory_management_client.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). @@ -26836,9 +28183,6 @@ class SapOpenHubLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -26852,8 +28196,12 @@ class SapOpenHubLinkedService(LinkedService): 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -26862,13 +28210,17 @@ def __init__( **kwargs ): super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.type = 'SapOpenHub' - self.server = kwargs['server'] - self.system_number = kwargs['system_number'] - self.client_id = kwargs['client_id'] + self.type = 'SapOpenHub' # type: str + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.logon_group = kwargs.get('logon_group', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -26904,6 +28256,13 @@ class SapOpenHubSource(TabularSource): requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). :type base_request_id: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). 
+ :type sap_data_column_delimiter: object """ _validation = { @@ -26920,6 +28279,8 @@ class SapOpenHubSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, } def __init__( @@ -26927,9 +28288,11 @@ def __init__( **kwargs ): super(SapOpenHubSource, self).__init__(**kwargs) - self.type = 'SapOpenHubSource' + self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = kwargs.get('exclude_last_request', None) self.base_request_id = kwargs.get('base_request_id', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) class SapOpenHubTableDataset(Dataset): @@ -26997,7 +28360,7 @@ def __init__( **kwargs ): super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.type = 'SapOpenHubTable' + self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = kwargs['open_hub_destination_name'] self.exclude_last_request = kwargs.get('exclude_last_request', None) self.base_request_id = kwargs.get('base_request_id', None) @@ -27107,7 +28470,7 @@ def __init__( **kwargs ): super(SapTableLinkedService, self).__init__(**kwargs) - self.type = 'SapTable' + self.type = 'SapTable' # type: str self.server = kwargs.get('server', None) self.system_number = kwargs.get('system_number', None) self.client_id = kwargs.get('client_id', None) @@ -27219,7 +28582,7 @@ def __init__( **kwargs ): super(SapTableResourceDataset, self).__init__(**kwargs) - self.type = 'SapTableResource' + self.type = 'SapTableResource' # type: str self.table_name = kwargs['table_name'] @@ -27266,6 +28629,10 @@ class SapTableSource(TabularSource): :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :type sap_data_column_delimiter: object :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". 
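Both SapOpenHubSource and SapTableSource gain a sap_data_column_delimiter property in this change, and the open hub source also gains custom_rfc_read_table_function_module. A hedged sketch, with the same assumed import path as above and an illustrative RFC module name:

```python
# Sketch of a SAP Open Hub copy source using the two newly added properties.
# Assumption: import path inferred from the docstring references; values illustrative.
from data_factory_management_client.models import SapOpenHubSource

open_hub_source = SapOpenHubSource(
    exclude_last_request=True,
    base_request_id=0,
    custom_rfc_read_table_function_module="/SAPDS/RFC_READ_TABLE2",  # hypothetical custom RFC module
    sap_data_column_delimiter="|",   # single character used to split the RFC output
)
```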
@@ -27293,6 +28660,7 @@ class SapTableSource(TabularSource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -27302,13 +28670,14 @@ def __init__( **kwargs ): super(SapTableSource, self).__init__(**kwargs) - self.type = 'SapTableSource' + self.type = 'SapTableSource' # type: str self.row_count = kwargs.get('row_count', None) self.row_skips = kwargs.get('row_skips', None) self.rfc_table_fields = kwargs.get('rfc_table_fields', None) self.rfc_table_options = kwargs.get('rfc_table_options', None) self.batch_size = kwargs.get('batch_size', None) self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) @@ -27359,7 +28728,7 @@ def __init__( **kwargs ): super(ScheduleTrigger, self).__init__(**kwargs) - self.type = 'ScheduleTrigger' + self.type = 'ScheduleTrigger' # type: str self.recurrence = kwargs['recurrence'] @@ -27474,7 +28843,7 @@ def __init__( **kwargs ): super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' + self.type = 'SecureString' # type: str self.value = kwargs['value'] @@ -27510,7 +28879,7 @@ def __init__( **kwargs ): super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.type = 'SelfDependencyTumblingWindowTriggerReference' + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str self.offset = kwargs['offset'] self.size = kwargs.get('size', None) @@ -27548,7 +28917,7 @@ def __init__( **kwargs ): super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) - self.type = 'SelfHosted' + self.type = 'SelfHosted' # type: str self.linked_info = kwargs.get('linked_info', None) @@ -27785,7 +29154,7 @@ def __init__( **kwargs ): super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.type = 'SelfHosted' + self.type = 'SelfHosted' # type: str self.create_time = None self.task_queue_id = None self.internal_channel_encryption = None @@ -27885,7 +29254,7 @@ def __init__( **kwargs ): super(ServiceNowLinkedService, self).__init__(**kwargs) - self.type = 'ServiceNow' + self.type = 'ServiceNow' # type: str self.endpoint = kwargs['endpoint'] self.authentication_type = kwargs['authentication_type'] self.username = kwargs.get('username', None) @@ -27952,7 +29321,7 @@ def __init__( **kwargs ): super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.type = 'ServiceNowObject' + self.type = 'ServiceNowObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -28006,7 +29375,7 @@ def __init__( **kwargs ): super(ServiceNowSource, self).__init__(**kwargs) - self.type = 'ServiceNowSource' + self.type = 'ServiceNowSource' # type: str self.query = kwargs.get('query', None) @@ -28055,7 +29424,7 @@ def __init__( **kwargs ): super(SetVariableActivity, self).__init__(**kwargs) - self.type = 'SetVariable' + self.type = 'SetVariable' # type: str self.variable_name = kwargs.get('variable_name', None) self.value = kwargs.get('value', None) @@ -28094,7 +29463,7 
@@ def __init__( **kwargs ): super(SftpLocation, self).__init__(**kwargs) - self.type = 'SftpLocation' + self.type = 'SftpLocation' # type: str class SftpReadSettings(StoreReadSettings): @@ -28163,7 +29532,7 @@ def __init__( **kwargs ): super(SftpReadSettings, self).__init__(**kwargs) - self.type = 'SftpReadSettings' + self.type = 'SftpReadSettings' # type: str self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) @@ -28262,7 +29631,7 @@ def __init__( **kwargs ): super(SftpServerLinkedService, self).__init__(**kwargs) - self.type = 'Sftp' + self.type = 'Sftp' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -28318,7 +29687,7 @@ def __init__( **kwargs ): super(SftpWriteSettings, self).__init__(**kwargs) - self.type = 'SftpWriteSettings' + self.type = 'SftpWriteSettings' # type: str self.operation_timeout = kwargs.get('operation_timeout', None) self.use_temp_file_rename = kwargs.get('use_temp_file_rename', None) @@ -28389,7 +29758,7 @@ def __init__( **kwargs ): super(SharePointOnlineListLinkedService, self).__init__(**kwargs) - self.type = 'SharePointOnlineList' + self.type = 'SharePointOnlineList' # type: str self.site_url = kwargs['site_url'] self.tenant_id = kwargs['tenant_id'] self.service_principal_id = kwargs['service_principal_id'] @@ -28452,7 +29821,7 @@ def __init__( **kwargs ): super(SharePointOnlineListResourceDataset, self).__init__(**kwargs) - self.type = 'SharePointOnlineListResource' + self.type = 'SharePointOnlineListResource' # type: str self.list_name = kwargs.get('list_name', None) @@ -28503,7 +29872,7 @@ def __init__( **kwargs ): super(SharePointOnlineListSource, self).__init__(**kwargs) - self.type = 'SharePointOnlineListSource' + self.type = 'SharePointOnlineListSource' # type: str self.query = kwargs.get('query', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -28572,7 +29941,7 @@ def __init__( **kwargs ): super(ShopifyLinkedService, self).__init__(**kwargs) - self.type = 'Shopify' + self.type = 'Shopify' # type: str self.host = kwargs['host'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -28635,7 +30004,7 @@ def __init__( **kwargs ): super(ShopifyObjectDataset, self).__init__(**kwargs) - self.type = 'ShopifyObject' + self.type = 'ShopifyObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -28689,7 +30058,7 @@ def __init__( **kwargs ): super(ShopifySource, self).__init__(**kwargs) - self.type = 'ShopifySource' + self.type = 'ShopifySource' # type: str self.query = kwargs.get('query', None) @@ -28777,7 +30146,7 @@ def __init__( **kwargs ): super(SnowflakeDataset, self).__init__(**kwargs) - self.type = 'SnowflakeTable' + self.type = 'SnowflakeTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -28820,7 +30189,7 @@ def __init__( **kwargs ): super(SnowflakeExportCopyCommand, self).__init__(**kwargs) - self.type = 'SnowflakeExportCopyCommand' + self.type = 'SnowflakeExportCopyCommand' # type: str self.additional_copy_options = kwargs.get('additional_copy_options', None) self.additional_format_options = kwargs.get('additional_format_options', None) @@ -28863,7 +30232,7 @@ def __init__( **kwargs ): 
super(SnowflakeImportCopyCommand, self).__init__(**kwargs) - self.type = 'SnowflakeImportCopyCommand' + self.type = 'SnowflakeImportCopyCommand' # type: str self.additional_copy_options = kwargs.get('additional_copy_options', None) self.additional_format_options = kwargs.get('additional_format_options', None) @@ -28919,7 +30288,7 @@ def __init__( **kwargs ): super(SnowflakeLinkedService, self).__init__(**kwargs) - self.type = 'Snowflake' + self.type = 'Snowflake' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -28978,7 +30347,7 @@ def __init__( **kwargs ): super(SnowflakeSink, self).__init__(**kwargs) - self.type = 'SnowflakeSink' + self.type = 'SnowflakeSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) self.import_settings = kwargs.get('import_settings', None) @@ -29027,7 +30396,7 @@ def __init__( **kwargs ): super(SnowflakeSource, self).__init__(**kwargs) - self.type = 'SnowflakeSource' + self.type = 'SnowflakeSource' # type: str self.query = kwargs.get('query', None) self.export_settings = kwargs.get('export_settings', None) @@ -29131,7 +30500,7 @@ def __init__( **kwargs ): super(SparkLinkedService, self).__init__(**kwargs) - self.type = 'Spark' + self.type = 'Spark' # type: str self.host = kwargs['host'] self.port = kwargs['port'] self.server_type = kwargs.get('server_type', None) @@ -29210,7 +30579,7 @@ def __init__( **kwargs ): super(SparkObjectDataset, self).__init__(**kwargs) - self.type = 'SparkObject' + self.type = 'SparkObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -29266,11 +30635,11 @@ def __init__( **kwargs ): super(SparkSource, self).__init__(**kwargs) - self.type = 'SparkSource' + self.type = 'SparkSource' # type: str self.query = kwargs.get('query', None) -class SqlDWSink(CopySink): +class SQLDWSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. @@ -29338,8 +30707,8 @@ def __init__( self, **kwargs ): - super(SqlDWSink, self).__init__(**kwargs) - self.type = 'SqlDWSink' + super(SQLDWSink, self).__init__(**kwargs) + self.type = 'SqlDWSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) self.allow_poly_base = kwargs.get('allow_poly_base', None) self.poly_base_settings = kwargs.get('poly_base_settings', None) @@ -29348,7 +30717,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SqlDWSource(TabularSource): +class SQLDWSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -29386,9 +30755,9 @@ class SqlDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -29407,15 +30776,15 @@ class SqlDWSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( self, **kwargs ): - super(SqlDWSource, self).__init__(**kwargs) - self.type = 'SqlDWSource' + super(SQLDWSource, self).__init__(**kwargs) + self.type = 'SqlDWSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -29423,7 +30792,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SqlMiSink(CopySink): +class SQLMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. @@ -29492,8 +30861,8 @@ def __init__( self, **kwargs ): - super(SqlMiSink, self).__init__(**kwargs) - self.type = 'SqlMISink' + super(SQLMiSink, self).__init__(**kwargs) + self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -29502,7 +30871,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SqlMiSource(TabularSource): +class SQLMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -29541,9 +30910,9 @@ class SqlMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -29563,15 +30932,15 @@ class SqlMiSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( self, **kwargs ): - super(SqlMiSource, self).__init__(**kwargs) - self.type = 'SqlMISource' + super(SQLMiSource, self).__init__(**kwargs) + self.type = 'SqlMISource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -29580,7 +30949,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SqlPartitionSettings(msrest.serialization.Model): +class SQLPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -29609,13 +30978,13 @@ def __init__( self, **kwargs ): - super(SqlPartitionSettings, self).__init__(**kwargs) + super(SQLPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class SqlServerLinkedService(LinkedService): +class SQLServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. @@ -29669,15 +31038,15 @@ def __init__( self, **kwargs ): - super(SqlServerLinkedService, self).__init__(**kwargs) - self.type = 'SqlServer' + super(SQLServerLinkedService, self).__init__(**kwargs) + self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SqlServerSink(CopySink): +class SQLServerSink(CopySink): """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. @@ -29746,8 +31115,8 @@ def __init__( self, **kwargs ): - super(SqlServerSink, self).__init__(**kwargs) - self.type = 'SqlServerSink' + super(SQLServerSink, self).__init__(**kwargs) + self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -29756,7 +31125,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SqlServerSource(TabularSource): +class SQLServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. 
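The Sql* classes are renamed to SQL* here, but the serialized type strings are unchanged, so partitioned reads are configured exactly as before. A sketch under the same assumed import path, with an illustrative column name and bounds:

```python
# Sketch: dynamic-range partitioning with the renamed SQL* models.
from data_factory_management_client.models import SQLMiSource, SQLPartitionSettings

partition_settings = SQLPartitionSettings(
    partition_column_name="OrderDate",
    partition_lower_bound="2020-01-01T00:00:00Z",
    partition_upper_bound="2020-12-31T00:00:00Z",
)

mi_source = SQLMiSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    partition_option="DynamicRange",
    partition_settings=partition_settings,
)

assert mi_source.type == "SqlMISource"   # wire type string is unchanged by the rename
```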
@@ -29795,9 +31164,9 @@ class SqlServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -29817,15 +31186,15 @@ class SqlServerSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( self, **kwargs ): - super(SqlServerSource, self).__init__(**kwargs) - self.type = 'SqlServerSource' + super(SQLServerSource, self).__init__(**kwargs) + self.type = 'SqlServerSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -29834,7 +31203,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SqlServerStoredProcedureActivity(ExecutionActivity): +class SQLServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -29888,13 +31257,13 @@ def __init__( self, **kwargs ): - super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.type = 'SqlServerStoredProcedure' + super(SQLServerStoredProcedureActivity, self).__init__(**kwargs) + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) -class SqlServerTableDataset(Dataset): +class SQLServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. @@ -29956,14 +31325,14 @@ def __init__( self, **kwargs ): - super(SqlServerTableDataset, self).__init__(**kwargs) - self.type = 'SqlServerTable' + super(SQLServerTableDataset, self).__init__(**kwargs) + self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class SqlSink(CopySink): +class SQLSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. 
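The stored procedure activity follows the same rename pattern: only the Python class name changes, while the serialized type stays 'SqlServerStoredProcedure'. A minimal sketch, assuming the import path above and illustrative names (the required name comes from the Activity base model):

```python
# Sketch for the renamed stored-procedure activity.
from data_factory_management_client.models import SQLServerStoredProcedureActivity

activity = SQLServerStoredProcedureActivity(
    name="RefreshAggregates",                          # required by the Activity base model
    stored_procedure_name="dbo.usp_RefreshAggregates", # required type property
)

assert activity.type == "SqlServerStoredProcedure"
```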
@@ -30032,8 +31401,8 @@ def __init__( self, **kwargs ): - super(SqlSink, self).__init__(**kwargs) - self.type = 'SqlSink' + super(SQLSink, self).__init__(**kwargs) + self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -30042,7 +31411,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SqlSource(TabularSource): +class SQLSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -30083,9 +31452,9 @@ class SqlSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -30105,15 +31474,15 @@ class SqlSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( self, **kwargs ): - super(SqlSource, self).__init__(**kwargs) - self.type = 'SqlSource' + super(SQLSource, self).__init__(**kwargs) + self.type = 'SqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -30140,14 +31509,17 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The URL of the Square instance. (i.e. mystore.mysquare.com). + :param connection_properties: Properties used to connect to Square. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object + :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). :type host: object - :param client_id: Required. The client ID associated with your Square application. + :param client_id: The client ID associated with your Square application. :type client_id: object :param client_secret: The client secret associated with your Square application. :type client_secret: ~data_factory_management_client.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square application dashboard. - (i.e. http://localhost:2500). + :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + http://localhost:2500). 
:type redirect_uri: object :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. @@ -30167,9 +31539,6 @@ class SquareLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, } _attribute_map = { @@ -30179,6 +31548,7 @@ class SquareLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, @@ -30194,11 +31564,12 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(**kwargs) - self.type = 'Square' - self.host = kwargs['host'] - self.client_id = kwargs['client_id'] + self.type = 'Square' # type: str + self.connection_properties = kwargs.get('connection_properties', None) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) self.client_secret = kwargs.get('client_secret', None) - self.redirect_uri = kwargs['redirect_uri'] + self.redirect_uri = kwargs.get('redirect_uri', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.use_host_verification = kwargs.get('use_host_verification', None) self.use_peer_verification = kwargs.get('use_peer_verification', None) @@ -30259,7 +31630,7 @@ def __init__( **kwargs ): super(SquareObjectDataset, self).__init__(**kwargs) - self.type = 'SquareObject' + self.type = 'SquareObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -30313,7 +31684,7 @@ def __init__( **kwargs ): super(SquareSource, self).__init__(**kwargs) - self.type = 'SquareSource' + self.type = 'SquareSource' # type: str self.query = kwargs.get('query', None) @@ -30431,7 +31802,7 @@ def __init__( **kwargs ): super(SsisObjectMetadata, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) @@ -30475,7 +31846,7 @@ def __init__( **kwargs ): super(SsisEnvironment, self).__init__(**kwargs) - self.type = 'Environment' + self.type = 'Environment' # type: str self.folder_id = kwargs.get('folder_id', None) self.variables = kwargs.get('variables', None) @@ -30604,21 +31975,19 @@ def __init__( **kwargs ): super(SsisFolder, self).__init__(**kwargs) - self.type = 'Folder' + self.type = 'Folder' # type: str class SsisLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~data_factory_management_client.models.SsisLogLocationType :param access_credential: The package execution log access credential. 
:type access_credential: ~data_factory_management_client.models.SsisAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 @@ -30629,7 +31998,7 @@ class SsisLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -30639,14 +32008,13 @@ class SsisLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, **kwargs ): super(SsisLogLocation, self).__init__(**kwargs) self.log_path = kwargs['log_path'] + self.type = kwargs['type'] self.access_credential = kwargs.get('access_credential', None) self.log_refresh_interval = kwargs.get('log_refresh_interval', None) @@ -30749,7 +32117,7 @@ def __init__( **kwargs ): super(SsisPackage, self).__init__(**kwargs) - self.type = 'Package' + self.type = 'Package' # type: str self.folder_id = kwargs.get('folder_id', None) self.project_version = kwargs.get('project_version', None) self.project_id = kwargs.get('project_id', None) @@ -30923,7 +32291,7 @@ def __init__( **kwargs ): super(SsisProject, self).__init__(**kwargs) - self.type = 'Project' + self.type = 'Project' # type: str self.folder_id = kwargs.get('folder_id', None) self.version = kwargs.get('version', None) self.environment_refs = kwargs.get('environment_refs', None) @@ -31122,7 +32490,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(**kwargs) - self.type = 'Switch' + self.type = 'Switch' # type: str self.on = kwargs['on'] self.cases = kwargs.get('cases', None) self.default_activities = kwargs.get('default_activities', None) @@ -31219,7 +32587,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(**kwargs) - self.type = 'Sybase' + self.type = 'Sybase' # type: str self.server = kwargs['server'] self.database = kwargs['database'] self.schema = kwargs.get('schema', None) @@ -31278,7 +32646,7 @@ def __init__( **kwargs ): super(SybaseSource, self).__init__(**kwargs) - self.type = 'SybaseSource' + self.type = 'SybaseSource' # type: str self.query = kwargs.get('query', None) @@ -31336,7 +32704,7 @@ def __init__( **kwargs ): super(SybaseTableDataset, self).__init__(**kwargs) - self.type = 'SybaseTable' + self.type = 'SybaseTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -31399,7 +32767,7 @@ def __init__( **kwargs ): super(TabularTranslator, self).__init__(**kwargs) - self.type = 'TabularTranslator' + self.type = 'TabularTranslator' # type: str self.column_mappings = kwargs.get('column_mappings', None) self.schema_mapping = kwargs.get('schema_mapping', None) self.collection_reference = kwargs.get('collection_reference', None) @@ -31409,6 +32777,74 @@ def __init__( self.type_conversion_settings = kwargs.get('type_conversion_settings', None) +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarReadSettings, self).__init__(**kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. 
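TarReadSettings and TarGZipReadSettings are brand-new compression read settings. A minimal sketch, again assuming the import path inferred from the docstrings; attaching these objects to a format read settings object (for example a delimited-text read settings' compression properties) is the expected wiring but is not part of this diff:

```python
# Sketch for the new Tar/TarGZip compression read settings.
from data_factory_management_client.models import TarGZipReadSettings, TarReadSettings

# Keep or drop the archive file name as a folder level when expanding.
targz_settings = TarGZipReadSettings(preserve_compression_file_name_as_folder=False)
tar_settings = TarReadSettings(preserve_compression_file_name_as_folder=True)

assert targz_settings.type == "TarGZipReadSettings"
assert tar_settings.type == "TarReadSettings"
```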
@@ -31471,7 +32907,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(**kwargs) - self.type = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -31569,7 +33005,7 @@ def __init__( **kwargs ): super(TeradataSource, self).__init__(**kwargs) - self.type = 'TeradataSource' + self.type = 'TeradataSource' # type: str self.query = kwargs.get('query', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) @@ -31633,7 +33069,7 @@ def __init__( **kwargs ): super(TeradataTableDataset, self).__init__(**kwargs) - self.type = 'TeradataTable' + self.type = 'TeradataTable' # type: str self.database = kwargs.get('database', None) self.table = kwargs.get('table', None) @@ -31706,7 +33142,7 @@ def __init__( **kwargs ): super(TextFormat, self).__init__(**kwargs) - self.type = 'TextFormat' + self.type = 'TextFormat' # type: str self.column_delimiter = kwargs.get('column_delimiter', None) self.row_delimiter = kwargs.get('row_delimiter', None) self.escape_char = kwargs.get('escape_char', None) @@ -31751,7 +33187,7 @@ def __init__( **kwargs ): super(TriggerDependencyReference, self).__init__(**kwargs) - self.type = 'TriggerDependencyReference' + self.type = 'TriggerDependencyReference' # type: str self.reference_trigger = kwargs['reference_trigger'] @@ -32155,7 +33591,7 @@ def __init__( **kwargs ): super(TumblingWindowTrigger, self).__init__(**kwargs) - self.type = 'TumblingWindowTrigger' + self.type = 'TumblingWindowTrigger' # type: str self.pipeline = kwargs['pipeline'] self.frequency = kwargs['frequency'] self.interval = kwargs['interval'] @@ -32203,7 +33639,7 @@ def __init__( **kwargs ): super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) - self.type = 'TumblingWindowTriggerDependencyReference' + self.type = 'TumblingWindowTriggerDependencyReference' # type: str self.offset = kwargs.get('offset', None) self.size = kwargs.get('size', None) @@ -32308,7 +33744,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(**kwargs) - self.type = 'Until' + self.type = 'Until' # type: str self.expression = kwargs['expression'] self.timeout = kwargs.get('timeout', None) self.activities = kwargs['activities'] @@ -32496,7 +33932,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(**kwargs) - self.type = 'Validation' + self.type = 'Validation' # type: str self.timeout = kwargs.get('timeout', None) self.sleep = kwargs.get('sleep', None) self.minimum_size = kwargs.get('minimum_size', None) @@ -32583,7 +34019,7 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(**kwargs) - self.type = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -32639,7 +34075,7 @@ def __init__( **kwargs ): super(VerticaSource, self).__init__(**kwargs) - self.type = 'VerticaSource' + self.type = 'VerticaSource' # type: str self.query = kwargs.get('query', None) @@ -32706,7 +34142,7 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(**kwargs) - self.type = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = 
kwargs.get('schema_type_properties_schema', None) @@ -32755,7 +34191,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(**kwargs) - self.type = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = kwargs['wait_time_in_seconds'] @@ -32835,7 +34271,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(**kwargs) - self.type = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = kwargs['method'] self.url = kwargs['url'] self.headers = kwargs.get('headers', None) @@ -32925,7 +34361,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = kwargs['url'] - self.authentication_type = None + self.authentication_type = None # type: Optional[str] class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -32957,7 +34393,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -32998,7 +34434,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = kwargs['username'] self.password = kwargs['password'] @@ -33040,7 +34476,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.authentication_type = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = kwargs['pfx'] self.password = kwargs['password'] @@ -33048,8 +34484,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -33065,8 +34499,8 @@ class WebHookActivity(Activity): :type depends_on: list[~data_factory_management_client.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~data_factory_management_client.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~data_factory_management_client.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -33083,17 +34517,17 @@ class WebHookActivity(Activity): :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~data_factory_management_client.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, - statusCode, output and error in callback request body will be - consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in - callback request. Default is false. Type: boolean (or Expression with resultType boolean). + :param report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). 
:type report_status_on_call_back: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, + 'method': {'required': True}, 'url': {'required': True}, } @@ -33113,14 +34547,13 @@ class WebHookActivity(Activity): 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } - method = "POST" - def __init__( self, **kwargs ): super(WebHookActivity, self).__init__(**kwargs) - self.type = 'WebHook' + self.type = 'WebHook' # type: str + self.method = kwargs['method'] self.url = kwargs['url'] self.timeout = kwargs.get('timeout', None) self.headers = kwargs.get('headers', None) @@ -33171,7 +34604,7 @@ def __init__( **kwargs ): super(WebLinkedService, self).__init__(**kwargs) - self.type = 'Web' + self.type = 'Web' # type: str self.type_properties = kwargs['type_properties'] @@ -33217,7 +34650,7 @@ def __init__( **kwargs ): super(WebSource, self).__init__(**kwargs) - self.type = 'WebSource' + self.type = 'WebSource' # type: str self.additional_columns = kwargs.get('additional_columns', None) @@ -33281,7 +34714,7 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(**kwargs) - self.type = 'WebTable' + self.type = 'WebTable' # type: str self.index = kwargs['index'] self.path = kwargs.get('path', None) @@ -33304,7 +34737,10 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The endpoint of the Xero server. (i.e. api.xero.com). + :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object + :param host: The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. 
:type consumer_key: ~data_factory_management_client.models.SecretBase @@ -33330,7 +34766,6 @@ class XeroLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { @@ -33340,6 +34775,7 @@ class XeroLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, @@ -33354,8 +34790,9 @@ def __init__( **kwargs ): super(XeroLinkedService, self).__init__(**kwargs) - self.type = 'Xero' - self.host = kwargs['host'] + self.type = 'Xero' # type: str + self.connection_properties = kwargs.get('connection_properties', None) + self.host = kwargs.get('host', None) self.consumer_key = kwargs.get('consumer_key', None) self.private_key = kwargs.get('private_key', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -33418,7 +34855,7 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(**kwargs) - self.type = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -33472,7 +34909,7 @@ def __init__( **kwargs ): super(XeroSource, self).__init__(**kwargs) - self.type = 'XeroSource' + self.type = 'XeroSource' # type: str self.query = kwargs.get('query', None) @@ -33543,7 +34980,7 @@ def __init__( **kwargs ): super(XmlDataset, self).__init__(**kwargs) - self.type = 'Xml' + self.type = 'Xml' # type: str self.location = kwargs.get('location', None) self.encoding_name = kwargs.get('encoding_name', None) self.null_value = kwargs.get('null_value', None) @@ -33565,6 +35002,12 @@ class XmlReadSettings(FormatReadSettings): :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). :type validation_mode: object + :param detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). + :type detect_data_type: object + :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). + :type namespaces: object :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. 
Example: @@ -33581,6 +35024,8 @@ class XmlReadSettings(FormatReadSettings): 'type': {'key': 'type', 'type': 'str'}, 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, 'validation_mode': {'key': 'validationMode', 'type': 'object'}, + 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, + 'namespaces': {'key': 'namespaces', 'type': 'object'}, 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, } @@ -33589,9 +35034,11 @@ def __init__( **kwargs ): super(XmlReadSettings, self).__init__(**kwargs) - self.type = 'XmlReadSettings' + self.type = 'XmlReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) self.validation_mode = kwargs.get('validation_mode', None) + self.detect_data_type = kwargs.get('detect_data_type', None) + self.namespaces = kwargs.get('namespaces', None) self.namespace_prefixes = kwargs.get('namespace_prefixes', None) @@ -33643,7 +35090,7 @@ def __init__( **kwargs ): super(XmlSource, self).__init__(**kwargs) - self.type = 'XmlSource' + self.type = 'XmlSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) self.additional_columns = kwargs.get('additional_columns', None) @@ -33679,7 +35126,7 @@ def __init__( **kwargs ): super(ZipDeflateReadSettings, self).__init__(**kwargs) - self.type = 'ZipDeflateReadSettings' + self.type = 'ZipDeflateReadSettings' # type: str self.preserve_zip_file_name_as_folder = kwargs.get('preserve_zip_file_name_as_folder', None) @@ -33701,7 +35148,10 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object + :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). :type endpoint: object :param access_token: The access token for Zoho authentication. 
:type access_token: ~data_factory_management_client.models.SecretBase @@ -33723,7 +35173,6 @@ class ZohoLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, } _attribute_map = { @@ -33733,6 +35182,7 @@ class ZohoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -33746,8 +35196,9 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(**kwargs) - self.type = 'Zoho' - self.endpoint = kwargs['endpoint'] + self.type = 'Zoho' # type: str + self.connection_properties = kwargs.get('connection_properties', None) + self.endpoint = kwargs.get('endpoint', None) self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.use_host_verification = kwargs.get('use_host_verification', None) @@ -33809,7 +35260,7 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(**kwargs) - self.type = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -33863,5 +35314,5 @@ def __init__( **kwargs ): super(ZohoSource, self).__init__(**kwargs) - self.type = 'ZohoSource' + self.type = 'ZohoSource' # type: str self.query = kwargs.get('query', None) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index 7f4d2c95d5c..2028e32e942 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -99,7 +99,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = additional_properties self.name = name - self.type: str = 'Activity' + self.type = 'Activity' # type: str self.description = description self.depends_on = depends_on self.user_properties = user_properties @@ -378,7 +378,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBAtlasLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
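A small sketch of how the polymorphic LinkedService base described above resolves to one of the listed sub-classes, assuming the vendored package is importable and that msrest's Deserializer accepts a plain dict (the generated client builds the same class map); the payload is a trimmed placeholder:

from msrest import Deserializer

from azext_datafactory.vendored_sdks.datafactory.models import _models_py3 as models

# Class map msrest expects: model name -> model class.
client_models = {name: cls for name, cls in vars(models).items() if isinstance(cls, type)}
deserialize = Deserializer(client_models)

payload = {
    "type": "AzureDatabricksDeltaLake",  # discriminator added in this change set
    "typeProperties": {},                # trimmed; real payloads carry the connection details
}
linked_service = deserialize("LinkedService", payload)
print(type(linked_service).__name__)  # expected: AzureDatabricksDeltaLakeLinkedService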
@@ -411,7 +411,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbAtlas': 'MongoDBAtlasLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -426,7 +426,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = connect_via self.description = description self.parameters = parameters @@ -530,7 +530,7 @@ def __init__( **kwargs ): super(AmazonMwsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = endpoint self.marketplace_id = marketplace_id self.seller_id = seller_id @@ -547,7 +547,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBAtlasCollectionDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
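The renames in this listing (AzureMySQLTableDataset, MongoDBAtlasCollectionDataset, and so on) are Python-class renames only; the REST discriminators are unchanged, which can be checked against the Dataset._subtype_map updated in the next hunk. A short sketch under the same import-path assumption, with placeholder names; passing type on LinkedServiceReference is shown explicitly since this change set removes constant discriminators:

from azext_datafactory.vendored_sdks.datafactory.models import _models_py3 as models

# Wire-level 'type' values stay the same; only the Python class names changed.
print(models.Dataset._subtype_map["type"]["AzureSqlTable"])           # AzureSQLTableDataset
print(models.Dataset._subtype_map["type"]["MongoDbAtlasCollection"])  # MongoDBAtlasCollectionDataset

# Constructing a renamed class still stamps the old discriminator string.
ls_ref = models.LinkedServiceReference(
    reference_name="exampleLinkedService",
    type="LinkedServiceReference",
)
table = models.AzureMySQLTableDataset(linked_service_name=ls_ref)
assert table.type == "AzureMySqlTable"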
@@ -593,7 +593,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDBAtlasCollectionDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -611,7 +611,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'Dataset' + self.type = 'Dataset' # type: str self.description = description self.structure = structure self.schema = schema @@ -685,7 +685,7 @@ def __init__( **kwargs ): super(AmazonMwsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonMWSObject' + self.type = 'AmazonMWSObject' # type: str self.table_name = table_name @@ -693,7 +693,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSqlApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBAtlasSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
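For the copy-source hierarchy introduced above, the discriminator written to the wire is the key side of CopySource._subtype_map (shown in the following hunk), and msrest's Serializer emits it automatically. A sketch under the same import-path assumption; WebSource is used here only because all of its properties are optional:

from msrest import Serializer

from azext_datafactory.vendored_sdks.datafactory.models import _models_py3 as models

client_models = {name: cls for name, cls in vars(models).items() if isinstance(cls, type)}
serialize = Serializer(client_models)

# Serializing through the base type still emits the subclass discriminator.
source = models.WebSource()
body = serialize.body(source, "CopySource")
print(body["type"])  # WebSource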
@@ -726,7 +726,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDBAtlasSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -740,7 +740,7 @@ def __init__( ): super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CopySource' + self.type = 'CopySource' # type: str self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections @@ -750,7 +750,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
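The tabular sources listed above (now SQLDWSource, MySQLSource, and so on) all inherit query_timeout and additional_columns from TabularSource, while a concrete subclass such as TeradataSource (whose kwargs form appears earlier in this file) only adds its own query and partition knobs. A short sketch under the same import-path assumption, with placeholder values:

from azext_datafactory.vendored_sdks.datafactory.models import _models_py3 as models

# query_timeout comes from TabularSource; query is TeradataSource-specific.
# Every property here is optional.
source = models.TeradataSource(
    query="SELECT * FROM exampleTable",
    query_timeout="02:00:00",
)
assert source.type == "TeradataSource"
assert isinstance(source, models.TabularSource)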
@@ -791,7 +791,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -806,7 +806,7 @@ def __init__( **kwargs ): super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'TabularSource' + self.type = 'TabularSource' # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -869,7 +869,7 @@ def __init__( **kwargs ): super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AmazonMWSSource' + self.type = 'AmazonMWSSource' # type: str self.query = query @@ -949,7 +949,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = server self.username = username self.password = password @@ -1021,7 +1021,7 @@ def __init__( **kwargs ): super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AmazonRedshiftSource' + self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1101,7 +1101,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -1206,7 +1206,7 @@ def __init__( **kwargs ): super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonS3Object' + self.type = 'AmazonS3Object' # type: str self.bucket_name = bucket_name self.key = key self.prefix = prefix @@ -1235,6 +1235,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, 
~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1245,6 +1248,9 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. Type: string + (or Expression with resultType string). + :type session_token: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1262,9 +1268,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1276,17 +1284,21 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, access_key_id: Optional[object] = None, secret_access_key: Optional["SecretBase"] = None, service_url: Optional[object] = None, + session_token: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonS3' + self.type = 'AmazonS3' # type: str + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential @@ -1336,7 +1348,7 @@ def __init__( ): super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'DatasetLocation' + self.type = 'DatasetLocation' # type: str self.folder_path = folder_path self.file_name = file_name @@ -1389,7 +1401,7 @@ def __init__( **kwargs ): super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'AmazonS3Location' + self.type = 'AmazonS3Location' # type: str self.bucket_name = bucket_name self.version = version @@ 
-1435,7 +1447,7 @@ def __init__( ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'StoreReadSettings' + self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = max_concurrent_connections @@ -1522,7 +1534,7 @@ def __init__( **kwargs ): super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AmazonS3ReadSettings' + self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -1588,7 +1600,7 @@ def __init__( **kwargs ): super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'AppendVariable' + self.type = 'AppendVariable' # type: str self.variable_name = variable_name self.value = value @@ -1668,7 +1680,7 @@ def __init__( **kwargs ): super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Avro' + self.type = 'Avro' # type: str self.location = location self.avro_compression_codec = avro_compression_codec self.avro_compression_level = avro_compression_level @@ -1718,7 +1730,7 @@ def __init__( ): super(DatasetStorageFormat, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'DatasetStorageFormat' + self.type = 'DatasetStorageFormat' # type: str self.serializer = serializer self.deserializer = deserializer @@ -1759,14 +1771,14 @@ def __init__( **kwargs ): super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type: str = 'AvroFormat' + self.type = 'AvroFormat' # type: str class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSqlApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. 
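
For illustration only, a minimal sketch of how the S3 temporary-security-credential properties added to AmazonS3LinkedService above might be exercised; it assumes the data_factory_management_client package referenced throughout these models and that SecureString is available alongside them, and all values are placeholders.

# Hedged sketch (not part of the generated patch): new authenticationType /
# sessionToken properties on AmazonS3LinkedService; placeholder credentials.
from data_factory_management_client.models import AmazonS3LinkedService, SecureString

s3_linked_service = AmazonS3LinkedService(
    authentication_type="TemporarySecurityCredentials",
    access_key_id="AKIAIOSFODNN7EXAMPLE",               # placeholder key id
    secret_access_key=SecureString(value="<secret>"),   # SecretBase subclass
    session_token="<sts-session-token>",                # serialized under typeProperties.sessionToken
)
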
All required parameters must be populated in order to send to Azure. @@ -1807,7 +1819,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} } def __init__( @@ -1823,7 +1835,7 @@ def __init__( ): super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CopySink' + self.type = 'CopySink' # type: str self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count @@ -1892,7 +1904,7 @@ def __init__( **kwargs ): super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, 
**kwargs) - self.type: str = 'AvroSink' + self.type = 'AvroSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -1949,7 +1961,7 @@ def __init__( **kwargs ): super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AvroSource' + self.type = 'AvroSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -1958,7 +1970,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1979,7 +1991,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1990,7 +2002,7 @@ def __init__( ): super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'FormatWriteSettings' + self.type = 'FormatWriteSettings' # type: str class AvroWriteSettings(FormatWriteSettings): @@ -2007,6 +2019,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
+ :type file_name_prefix: object """ _validation = { @@ -2018,6 +2037,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -2026,12 +2047,80 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'AvroWriteSettings' + self.type = 'AvroWriteSettings' # type: str self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzPowerShellSetup, CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzPowerShellSetup': 'AzPowerShellSetup', 'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzPowerShellSetup(CustomSetupBase): + """The express custom setup of installing Azure PowerShell. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param version: Required. The required version of Azure PowerShell to install. 
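
To make the AvroWriteSettings file-splitting options added above concrete, a small hedged sketch (construction only, placeholder values):

# Hedged sketch: maxRowsPerFile caps each written file's row count and
# fileNamePrefix names the split files when copying from a non-file-based store.
from data_factory_management_client.models import AvroWriteSettings

avro_write_settings = AvroWriteSettings(
    max_rows_per_file=1000000,   # cap each output file at ~1M rows
    file_name_prefix="part",     # placeholder prefix for the generated file names
)
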
+ :type version: str + """ + + _validation = { + 'type': {'required': True}, + 'version': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + } + + def __init__( + self, + *, + version: str, + **kwargs + ): + super(AzPowerShellSetup, self).__init__(**kwargs) + self.type = 'AzPowerShellSetup' # type: str + self.version = version class AzureBatchLinkedService(LinkedService): @@ -2111,7 +2200,7 @@ def __init__( **kwargs ): super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBatch' + self.type = 'AzureBatch' # type: str self.account_name = account_name self.access_key = access_key self.batch_uri = batch_uri @@ -2213,7 +2302,7 @@ def __init__( **kwargs ): super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureBlob' + self.type = 'AzureBlob' # type: str self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name @@ -2301,7 +2390,7 @@ def __init__( **kwargs ): super(AzureBlobFsDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureBlobFSFile' + self.type = 'AzureBlobFSFile' # type: str self.folder_path = folder_path self.file_name = file_name self.format = format @@ -2341,6 +2430,10 @@ class AzureBlobFsLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -2364,6 +2457,7 @@ class AzureBlobFsLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2380,16 +2474,18 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AzureBlobFsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobFS' + self.type = 'AzureBlobFS' # type: str self.url = url self.account_key = account_key self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -2436,7 +2532,7 @@ def __init__( **kwargs ): super(AzureBlobFsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'AzureBlobFSLocation' + self.type = 'AzureBlobFSLocation' # type: str self.file_system = file_system @@ -2518,7 +2614,7 @@ def __init__( **kwargs ): super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureBlobFSReadSettings' + self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -2587,7 +2683,7 @@ def __init__( **kwargs ): super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureBlobFSSink' + self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior @@ -2649,7 +2745,7 @@ def __init__( **kwargs ): super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureBlobFSSource' + self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive @@ -2659,7 +2755,7 @@ class StoreWriteSettings(msrest.serialization.Model): """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFsWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. + sub-classes are: AzureBlobFsWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. 
All required parameters must be populated in order to send to Azure. @@ -2687,7 +2783,7 @@ class StoreWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFsWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFsWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( @@ -2700,7 +2796,7 @@ def __init__( ): super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'StoreWriteSettings' + self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = max_concurrent_connections self.copy_behavior = copy_behavior @@ -2747,7 +2843,7 @@ def __init__( **kwargs ): super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type: str = 'AzureBlobFSWriteSettings' + self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -2792,6 +2888,10 @@ class AzureBlobStorageLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
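
The azureCloudType property added above to both AzureBlobFsLinkedService and AzureBlobStorageLinkedService selects the cloud endpoint used for service-principal authentication. A hedged sketch with the ADLS Gen2 (AzureBlobFS) variant, placeholder values throughout:

# Hedged sketch: service-principal auth against a sovereign cloud via the new
# azureCloudType property (AzurePublic, AzureChina, AzureUsGovernment, AzureGermany).
from data_factory_management_client.models import AzureBlobFsLinkedService, SecureString

adls_gen2_linked_service = AzureBlobFsLinkedService(
    url="https://exampleaccount.dfs.core.windows.net",   # placeholder account URL
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<client-secret>"),
    tenant="00000000-0000-0000-0000-000000000000",
    azure_cloud_type="AzureChina",
)
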
@@ -2817,6 +2917,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2836,11 +2937,12 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[str] = None, **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobStorage' + self.type = 'AzureBlobStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -2849,6 +2951,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -2895,7 +2998,7 @@ def __init__( **kwargs ): super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'AzureBlobStorageLocation' + self.type = 'AzureBlobStorageLocation' # type: str self.container = container @@ -2982,7 +3085,7 @@ def __init__( **kwargs ): super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureBlobStorageReadSettings' + self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -3037,290 +3140,250 @@ def __init__( **kwargs ): super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type: str = 'AzureBlobStorageWriteSettings' + self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. 
- :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type database: object """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, *, - domain: object, - access_token: 
"SecretBase", + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - existing_cluster_id: Optional[object] = None, - instance_pool_id: Optional[object] = None, - new_cluster_version: Optional[object] = None, - new_cluster_num_of_worker: Optional[object] = None, - new_cluster_node_type: Optional[object] = None, - new_cluster_spark_conf: Optional[Dict[str, object]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, - new_cluster_custom_tags: Optional[Dict[str, object]] = None, - new_cluster_driver_node_type: Optional[object] = None, - new_cluster_init_scripts: Optional[object] = None, - new_cluster_enable_elastic_disk: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + table: Optional[object] = None, + database: Optional[object] = None, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDatabricks' - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.instance_pool_id = instance_pool_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = table + self.database = database -class ExecutionActivity(Activity): - """Base class for all execution activities. +class ExportSettings(msrest.serialization.Model): + """Export command settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, **kwargs ): - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Execution' - self.linked_service_name = linked_service_name - self.policy = policy + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = 
additional_properties + self.type = 'ExportSettings' # type: str -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - command_timeout: Optional[object] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureDataExplorerCommand' - self.command = command - self.command_timeout = command_timeout + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. 
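
The export and import command settings above carry only the csv date and timestamp formats used while staging Delta Lake data; a hedged sketch:

# Hedged sketch: csv staging formats for Delta Lake copy in both directions.
from data_factory_management_client.models import (
    AzureDatabricksDeltaLakeExportCommand,
    AzureDatabricksDeltaLakeImportCommand,
)

export_settings = AzureDatabricksDeltaLakeExportCommand(
    date_format="yyyy-MM-dd",
    timestamp_format="yyyy-MM-dd HH:mm:ss",
)
import_settings = AzureDatabricksDeltaLakeImportCommand(
    date_format="yyyy-MM-dd",
    timestamp_format="yyyy-MM-dd HH:mm:ss",
)
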
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. All required parameters must be populated in order to send to Azure. @@ -3337,31 +3400,25 @@ class AzureDataExplorerLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, + 'domain': {'required': True}, } _attribute_map = { @@ -3371,39 +3428,529 @@ class AzureDataExplorerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - service_principal_id: object, - service_principal_key: "SecretBase", - database: object, - tenant: object, + domain: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + cluster_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataExplorer' - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
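
A hedged construction sketch for the AzureDatabricksDeltaLakeLinkedService defined above; the workspace URL, token, and cluster id are placeholders:

# Hedged sketch: Delta Lake linked service bound to an existing interactive cluster.
from data_factory_management_client.models import (
    AzureDatabricksDeltaLakeLinkedService,
    SecureString,
)

delta_linked_service = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # placeholder workspace URL
    access_token=SecureString(value="<databricks-pat>"),
    cluster_id="0000-000000-abcde000",                            # placeholder cluster id
)
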
+ :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
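
Tying the sink defined above to the import settings sketched earlier, hedged and with placeholder values:

# Hedged sketch: Delta Lake copy sink with a pre-copy script and csv import settings.
from data_factory_management_client.models import (
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeSink,
)

delta_sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="DELETE FROM default.events WHERE load_date = '2020-01-01'",  # placeholder SQL
    import_settings=AzureDatabricksDeltaLakeImportCommand(
        date_format="yyyy-MM-dd",
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)
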
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. + :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = query + self.export_settings = export_settings + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~data_factory_management_client.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. 
Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + domain: object, + access_token: "SecretBase", + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + existing_cluster_id: Optional[object] = None, + instance_pool_id: Optional[object] = None, + new_cluster_version: Optional[object] = None, + new_cluster_num_of_worker: Optional[object] = None, + new_cluster_node_type: Optional[object] = None, + new_cluster_spark_conf: Optional[Dict[str, object]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, + new_cluster_custom_tags: Optional[Dict[str, object]] = None, + new_cluster_log_destination: Optional[object] = None, + new_cluster_driver_node_type: Optional[object] = None, + new_cluster_init_scripts: Optional[object] = None, + new_cluster_enable_elastic_disk: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.instance_pool_id = instance_pool_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + 
self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_log_destination = new_cluster_log_destination + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~data_factory_management_client.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + **kwargs + ): + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = linked_service_name + self.policy = policy + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~data_factory_management_client.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + name: str, + command: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + command_timeout: Optional[object] = None, + **kwargs + ): + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = command + self.command_timeout = command_timeout + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. 
The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~data_factory_management_client.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint: object, + service_principal_id: object, + service_principal_key: "SecretBase", + database: object, + tenant: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. 
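Reviewer note: a minimal construction sketch for the Azure Data Explorer linked service defined in the hunk above. It assumes the models in this diff are importable as data_factory_management_client.models (the namespace the docstrings reference) and that SecureString, a SecretBase implementation from the upstream Data Factory SDK, is present in this package; all endpoint, ID, and database values are placeholders.

from data_factory_management_client.models import (
    AzureDataExplorerLinkedService,
    SecureString,  # assumed SecretBase implementation in this generated package
)

# All five required typeProperties from the docstring above.
adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://mycluster.westus2.kusto.windows.net",  # placeholder cluster URL
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder
    service_principal_key=SecureString(value="<service-principal-key>"),
    database="mydatabase",
    tenant="mytenant.onmicrosoft.com",
)
print(adx_ls.type)  # 'AzureDataExplorer' is set by __init__, not by the caller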
@@ -3470,7 +4017,7 @@ def __init__( **kwargs ): super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureDataExplorerSink' + self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json self.flush_immediately = flush_immediately @@ -3540,7 +4087,7 @@ def __init__( **kwargs ): super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureDataExplorerSource' + self.type = 'AzureDataExplorerSource' # type: str self.query = query self.no_truncation = no_truncation self.query_timeout = query_timeout @@ -3612,7 +4159,7 @@ def __init__( **kwargs ): super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureDataExplorerTable' + self.type = 'AzureDataExplorerTable' # type: str self.table = table @@ -3703,7 +4250,7 @@ def __init__( **kwargs ): super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeAnalytics' + self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = account_name self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -3793,7 +4340,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureDataLakeStoreFile' + self.type = 'AzureDataLakeStoreFile' # type: str self.folder_path = folder_path self.file_name = file_name self.format = format @@ -3830,6 +4377,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). 
:type account_name: object @@ -3861,6 +4412,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, @@ -3879,6 +4431,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, account_name: Optional[object] = None, subscription_id: Optional[object] = None, resource_group_name: Optional[object] = None, @@ -3886,11 +4439,12 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeStore' + self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = data_lake_store_uri self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.account_name = account_name self.subscription_id = subscription_id self.resource_group_name = resource_group_name @@ -3935,7 +4489,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'AzureDataLakeStoreLocation' + self.type = 'AzureDataLakeStoreLocation' # type: str class AzureDataLakeStoreReadSettings(StoreReadSettings): @@ -4028,7 +4582,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureDataLakeStoreReadSettings' + self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -4103,7 +4657,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureDataLakeStoreSink' + self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -4156,7 +4710,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureDataLakeStoreSource' + self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -4203,7 +4757,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreWriteSettings, 
self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type: str = 'AzureDataLakeStoreWriteSettings' + self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -4245,6 +4799,9 @@ class AzureFileStorageLinkedService(LinkedService): :param file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). :type file_share: object + :param snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :type snapshot: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -4270,6 +4827,7 @@ class AzureFileStorageLinkedService(LinkedService): 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, + 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -4289,11 +4847,12 @@ def __init__( sas_uri: Optional[object] = None, sas_token: Optional["AzureKeyVaultSecretReference"] = None, file_share: Optional[object] = None, + snapshot: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFileStorage' + self.type = 'AzureFileStorage' # type: str self.host = host self.user_id = user_id self.password = password @@ -4302,6 +4861,7 @@ def __init__( self.sas_uri = sas_uri self.sas_token = sas_token self.file_share = file_share + self.snapshot = snapshot self.encrypted_credential = encrypted_credential @@ -4343,7 +4903,7 @@ def __init__( **kwargs ): super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'AzureFileStorageLocation' + self.type = 'AzureFileStorageLocation' # type: str class AzureFileStorageReadSettings(StoreReadSettings): @@ -4429,7 +4989,7 @@ def __init__( **kwargs ): super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureFileStorageReadSettings' + self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -4442,6 +5002,46 @@ def __init__( self.modified_datetime_end = modified_datetime_end +class AzureFileStorageWriteSettings(StoreWriteSettings): + """Azure File Storage write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. 
+ :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureFileStorageWriteSettings' # type: str + + class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. @@ -4518,7 +5118,7 @@ def __init__( **kwargs ): super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureFunctionActivity' + self.type = 'AzureFunctionActivity' # type: str self.method = method self.function_name = function_name self.headers = headers @@ -4585,7 +5185,7 @@ def __init__( **kwargs ): super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFunction' + self.type = 'AzureFunction' # type: str self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential @@ -4641,7 +5241,7 @@ def __init__( **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureKeyVault' + self.type = 'AzureKeyVault' # type: str self.base_url = base_url @@ -4674,7 +5274,7 @@ def __init__( **kwargs ): super(SecretBase, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] class AzureKeyVaultSecretReference(SecretBase): @@ -4716,7 +5316,7 @@ def __init__( **kwargs ): super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type: str = 'AzureKeyVaultSecret' + self.type = 'AzureKeyVaultSecret' # type: str self.store = store self.secret_name = secret_name self.secret_version = secret_version @@ -4781,7 +5381,7 @@ def __init__( **kwargs ): super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMariaDB' + self.type = 'AzureMariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -4845,7 +5445,7 @@ def __init__( **kwargs ): super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, 
source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AzureMariaDBSource' + self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -4913,7 +5513,7 @@ def __init__( **kwargs ): super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMariaDBTable' + self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name @@ -4993,7 +5593,7 @@ def __init__( **kwargs ): super(AzureMlBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLBatchExecution' + self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = global_parameters self.web_service_outputs = web_service_outputs self.web_service_inputs = web_service_inputs @@ -5084,7 +5684,7 @@ def __init__( **kwargs ): super(AzureMlExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLExecutePipeline' + self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = ml_pipeline_id self.experiment_name = experiment_name self.ml_pipeline_parameters = ml_pipeline_parameters @@ -5174,7 +5774,7 @@ def __init__( **kwargs ): super(AzureMlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureML' + self.type = 'AzureML' # type: str self.ml_endpoint = ml_endpoint self.api_key = api_key self.update_resource_endpoint = update_resource_endpoint @@ -5268,7 +5868,7 @@ def __init__( **kwargs ): super(AzureMlServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMLService' + self.type = 'AzureMLService' # type: str self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.ml_workspace_name = ml_workspace_name @@ -5351,7 +5951,7 @@ def __init__( **kwargs ): super(AzureMlUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLUpdateResource' + self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = trained_model_name self.trained_model_linked_service_name = trained_model_linked_service_name self.trained_model_file_path = trained_model_file_path @@ -5392,7 +5992,7 @@ def __init__( self.linked_service_name = linked_service_name -class AzureMySqlLinkedService(LinkedService): +class AzureMySQLLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. 
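Reviewer note: the AzureMySql → AzureMySQL change below is purely a Python class rename; the discriminator written into self.type stays 'AzureMySql'. A small sketch under the same import assumption as above; connection_string is the linked service's required typeProperties value in the unchanged part of this file, and the value shown is a placeholder.

from data_factory_management_client.models import AzureMySQLLinkedService

mysql_ls = AzureMySQLLinkedService(
    connection_string="Server=example.mysql.database.azure.com;Port=3306;Database=mydb;UID=admin",  # placeholder
)
assert mysql_ls.type == "AzureMySql"  # discriminator unchanged by the class rename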
@@ -5451,14 +6051,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMySql' + super(AzureMySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzureMySqlSink(CopySink): +class AzureMySQLSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -5515,12 +6115,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureMySqlSink' + super(AzureMySQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzureMySqlSource(TabularSource): +class AzureMySQLSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -5576,12 +6176,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AzureMySqlSource' + super(AzureMySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'AzureMySqlSource' # type: str self.query = query -class AzureMySqlTableDataset(Dataset): +class AzureMySQLTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. 
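Reviewer note: the renamed source class likewise keeps its serialized type. A usage sketch (same import assumption as above); every parameter here is optional per the signature in this hunk, and the query text is hypothetical.

from data_factory_management_client.models import AzureMySQLSource

source = AzureMySQLSource(
    query="SELECT id, name FROM customers",  # hypothetical query
    query_timeout="02:00:00",                # matches the timeout pattern quoted in the docstrings
)
assert source.type == "AzureMySqlSource"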
@@ -5650,13 +6250,13 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMySqlTable' + super(AzureMySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureMySqlTable' # type: str self.table_name = table_name self.table = table -class AzurePostgreSqlLinkedService(LinkedService): +class AzurePostgreSQLLinkedService(LinkedService): """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5714,14 +6314,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzurePostgreSql' + super(AzurePostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzurePostgreSqlSink(CopySink): +class AzurePostgreSQLSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. @@ -5778,12 +6378,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzurePostgreSqlSink' + super(AzurePostgreSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzurePostgreSqlSource(TabularSource): +class AzurePostgreSQLSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. 
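Reviewer note: the PostgreSQL sink rename follows the same pattern; pre_copy_script is its only type-specific property and is optional. A sketch under the same import assumption, with a hypothetical script.

from data_factory_management_client.models import AzurePostgreSQLSink

pg_sink = AzurePostgreSQLSink(
    pre_copy_script="TRUNCATE TABLE staging.customers",  # hypothetical pre-copy script
    write_batch_size=10000,
)
assert pg_sink.type == "AzurePostgreSqlSink"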
@@ -5840,12 +6440,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AzurePostgreSqlSource' + super(AzurePostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'AzurePostgreSqlSource' # type: str self.query = query -class AzurePostgreSqlTableDataset(Dataset): +class AzurePostgreSQLTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -5919,8 +6519,8 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzurePostgreSqlTable' + super(AzurePostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzurePostgreSqlTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -5979,7 +6579,7 @@ def __init__( **kwargs ): super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureQueueSink' + self.type = 'AzureQueueSink' # type: str class AzureSearchIndexDataset(Dataset): @@ -6048,7 +6648,7 @@ def __init__( **kwargs ): super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSearchIndex' + self.type = 'AzureSearchIndex' # type: str self.index_name = index_name @@ -6111,7 +6711,7 @@ def __init__( **kwargs ): super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureSearchIndexSink' + self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -6175,13 +6775,13 @@ def __init__( **kwargs ): super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSearch' + self.type = 'AzureSearch' # type: str self.url = url self.key = key 
self.encrypted_credential = encrypted_credential -class AzureSqlDatabaseLinkedService(LinkedService): +class AzureSQLDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -6213,6 +6813,10 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6236,6 +6840,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6252,20 +6857,22 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDatabase' + super(AzureSQLDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDatabase' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential -class AzureSqlDWLinkedService(LinkedService): +class AzureSQLDWLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. @@ -6297,6 +6904,10 @@ class AzureSqlDWLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
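Reviewer note: the new optional azure_cloud_type lands alongside the existing service-principal settings on the SQL Database linked service. A minimal sketch, assuming the same import path and the SecureString helper as earlier; the connection string, IDs, and tenant are placeholders.

from data_factory_management_client.models import AzureSQLDatabaseLinkedService, SecureString

sql_ls = AzureSQLDatabaseLinkedService(
    connection_string="Server=tcp:example.database.windows.net,1433;Database=mydb;",  # placeholder
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder
    service_principal_key=SecureString(value="<service-principal-key>"),
    tenant="mytenant.onmicrosoft.com",
    azure_cloud_type="AzurePublic",  # new in this change; AzureChina/AzureUsGovernment/AzureGermany also allowed
)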
@@ -6320,6 +6931,7 @@ class AzureSqlDWLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6336,20 +6948,22 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDW' + super(AzureSQLDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDW' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential -class AzureSqlDWTableDataset(Dataset): +class AzureSQLDWTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. @@ -6423,14 +7037,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlDWTable' + super(AzureSQLDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlDWTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSqlMiLinkedService(LinkedService): +class AzureSQLMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -6462,6 +7076,10 @@ class AzureSqlMiLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
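Reviewer note: the same azure_cloud_type property is added to the SQL DW and SQL MI linked services, always keyed as typeProperties.azureCloudType in the attribute map. A sketch of the resulting wire shape, assuming these generated classes expose the msrest base Model's serialize(); the connection string is a placeholder and the commented-out dict is illustrative only.

from data_factory_management_client.models import AzureSQLDWLinkedService

dw_ls = AzureSQLDWLinkedService(
    connection_string="Server=tcp:example.database.windows.net,1433;Database=mydw;",  # placeholder
    azure_cloud_type="AzureUsGovernment",
)
body = dw_ls.serialize()  # msrest nests the 'typeProperties.azureCloudType' key into a sub-dict
# Illustrative shape: {'type': 'AzureSqlDW',
#                      'typeProperties': {'connectionString': ..., 'azureCloudType': 'AzureUsGovernment'}}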
@@ -6485,6 +7103,7 @@ class AzureSqlMiLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6501,20 +7120,22 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlMI' + super(AzureSQLMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlMI' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential -class AzureSqlMiTableDataset(Dataset): +class AzureSQLMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. @@ -6588,14 +7209,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSqlMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlMITable' + super(AzureSQLMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlMITable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSqlSink(CopySink): +class AzureSQLSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. 
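Reviewer note: for the renamed SQL sink whose __init__ follows in the next hunk, all type-specific properties stay optional. A construction sketch under the same import assumption; the pre-copy script is hypothetical and "autoCreate" is the table_option value documented for ADF copy sinks.

from data_factory_management_client.models import AzureSQLSink

sql_sink = AzureSQLSink(
    pre_copy_script="TRUNCATE TABLE dbo.staging",  # hypothetical
    table_option="autoCreate",                     # auto-create the sink table if it does not exist
)
assert sql_sink.type == "AzureSqlSink"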
@@ -6677,8 +7298,8 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureSqlSink' + super(AzureSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -6687,7 +7308,7 @@ def __init__( self.table_option = table_option -class AzureSqlSource(TabularSource): +class AzureSQLSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -6726,9 +7347,9 @@ class AzureSqlSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -6748,7 +7369,7 @@ class AzureSqlSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( @@ -6764,12 +7385,12 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SqlPartitionOption"]] = None, - partition_settings: Optional["SqlPartitionSettings"] = None, + partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, + partition_settings: Optional["SQLPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AzureSqlSource' + super(AzureSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 
'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -6778,7 +7399,7 @@ def __init__( self.partition_settings = partition_settings -class AzureSqlTableDataset(Dataset): +class AzureSQLTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. @@ -6852,8 +7473,8 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlTable' + super(AzureSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -6927,7 +7548,7 @@ def __init__( **kwargs ): super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureStorage' + self.type = 'AzureStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -7001,7 +7622,7 @@ def __init__( **kwargs ): super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureTable' + self.type = 'AzureTable' # type: str self.table_name = table_name @@ -7078,7 +7699,7 @@ def __init__( **kwargs ): super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'AzureTableSink' + self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name @@ -7148,7 +7769,7 @@ def __init__( **kwargs ): super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'AzureTableSource' + self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -7221,7 +7842,7 @@ def __init__( **kwargs ): super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, 
**kwargs) - self.type: str = 'AzureTableStorage' + self.type = 'AzureTableStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -7297,7 +7918,7 @@ def __init__( **kwargs ): super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Binary' + self.type = 'Binary' # type: str self.location = location self.compression = compression @@ -7338,7 +7959,7 @@ def __init__( ): super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'FormatReadSettings' + self.type = 'FormatReadSettings' # type: str class BinaryReadSettings(FormatReadSettings): @@ -7373,7 +7994,7 @@ def __init__( **kwargs ): super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'BinaryReadSettings' + self.type = 'BinaryReadSettings' # type: str self.compression_properties = compression_properties @@ -7434,7 +8055,7 @@ def __init__( **kwargs ): super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'BinarySink' + self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -7489,7 +8110,7 @@ def __init__( **kwargs ): super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'BinarySource' + self.type = 'BinarySource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -7545,7 +8166,7 @@ def __init__( ): super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'Trigger' + self.type = 'Trigger' # type: str self.description = description self.runtime_state = None self.annotations = annotations @@ -7605,7 +8226,7 @@ def __init__( **kwargs ): super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'MultiplePipelineTrigger' + self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = pipelines @@ -7683,7 +8304,7 @@ def __init__( **kwargs ): super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type: str = 'BlobEventsTrigger' + self.type = 'BlobEventsTrigger' # type: str self.blob_path_begins_with = blob_path_begins_with self.blob_path_ends_with = blob_path_ends_with self.ignore_empty_blobs = ignore_empty_blobs @@ -7763,7 +8384,7 @@ def __init__( **kwargs ): super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'BlobSink' + self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files 
self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header @@ -7828,7 +8449,7 @@ def __init__( **kwargs ): super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'BlobSource' + self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive @@ -7897,7 +8518,7 @@ def __init__( **kwargs ): super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type: str = 'BlobTrigger' + self.type = 'BlobTrigger' # type: str self.folder_path = folder_path self.max_concurrency = max_concurrency self.linked_service = linked_service @@ -7978,7 +8599,7 @@ def __init__( **kwargs ): super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Cassandra' + self.type = 'Cassandra' # type: str self.host = host self.authentication_type = authentication_type self.port = port @@ -8055,7 +8676,7 @@ def __init__( **kwargs ): super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'CassandraSource' + self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -8130,7 +8751,7 @@ def __init__( **kwargs ): super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CassandraTable' + self.type = 'CassandraTable' # type: str self.table_name = table_name self.keyspace = keyspace @@ -8195,7 +8816,7 @@ def __init__( **kwargs ): super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'ChainingTrigger' + self.type = 'ChainingTrigger' # type: str self.pipeline = pipeline self.depends_on = depends_on self.run_dimension = run_dimension @@ -8244,38 +8865,6 @@ def __init__( self.details = details -class CustomSetupBase(msrest.serialization.Model): - """The base definition of the custom setup. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} - } - - def __init__( - self, - **kwargs - ): - super(CustomSetupBase, self).__init__(**kwargs) - self.type: Optional[str] = None - - class CmdkeySetup(CustomSetupBase): """The custom setup of running cmdkey commands. @@ -8314,7 +8903,7 @@ def __init__( **kwargs ): super(CmdkeySetup, self).__init__(**kwargs) - self.type: str = 'CmdkeySetup' + self.type = 'CmdkeySetup' # type: str self.target_name = target_name self.user_name = user_name self.password = password @@ -8385,7 +8974,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CommonDataServiceForAppsEntity' + self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = entity_name @@ -8512,7 +9101,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CommonDataServiceForApps' + self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -8530,8 +9119,6 @@ def __init__( class CommonDataServiceForAppsSink(CopySink): """A copy activity Common Data Service for Apps sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -8554,8 +9141,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert". - :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
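Hedged sketch of the behavioural change above: write_behavior on CommonDataServiceForAppsSink is no longer a server-filled constant but a required constructor argument (the constructor change follows in the next hunk), so callers now pass it explicitly:

    from data_factory_management_client.models import CommonDataServiceForAppsSink

    sink = CommonDataServiceForAppsSink(
        write_behavior="Upsert",   # only documented DynamicsSinkWriteBehavior value
        ignore_null_values=True,
    )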
@@ -8567,7 +9155,7 @@ class CommonDataServiceForAppsSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -8583,11 +9171,10 @@ class CommonDataServiceForAppsSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], additional_properties: Optional[Dict[str, object]] = None, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, @@ -8599,7 +9186,8 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CommonDataServiceForAppsSink' + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -8657,7 +9245,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CommonDataServiceForAppsSource' + self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -8694,7 +9282,7 @@ def __init__( **kwargs ): super(ComponentSetup, self).__init__(**kwargs) - self.type: str = 'ComponentSetup' + self.type = 'ComponentSetup' # type: str self.component_name = component_name self.license_key = license_key @@ -8703,7 +9291,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -8724,7 +9312,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -8735,7 +9323,7 @@ def __init__( ): super(CompressionReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CompressionReadSettings' + self.type = 'CompressionReadSettings' # type: str class ConcurLinkedService(LinkedService): @@ -8756,6 +9344,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. 
The user name that you use to access Concur Service. @@ -8792,6 +9383,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8811,6 +9403,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, password: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -8819,7 +9412,8 @@ def __init__( **kwargs ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Concur' + self.type = 'Concur' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -8893,7 +9487,7 @@ def __init__( **kwargs ): super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ConcurObject' + self.type = 'ConcurObject' # type: str self.table_name = table_name @@ -8955,10 +9549,45 @@ def __init__( **kwargs ): super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'ConcurSource' + self.type = 'ConcurSource' # type: str self.query = query +class ConnectionStateProperties(msrest.serialization.Model): + """The connection state of a managed private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar actions_required: The actions required on the managed private endpoint. + :vartype actions_required: str + :ivar description: The managed private endpoint description. + :vartype description: str + :ivar status: The approval status. + :vartype status: str + """ + + _validation = { + 'actions_required': {'readonly': True}, + 'description': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ConnectionStateProperties, self).__init__(**kwargs) + self.actions_required = None + self.description = None + self.status = None + + class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. 
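Illustrative sketch for the new connection_properties bag added to ConcurLinkedService above; the payload key shown is hypothetical, and per the docstring the bag is mutually exclusive with the individual typed properties:

    from data_factory_management_client.models import ConcurLinkedService

    concur_ls = ConcurLinkedService(
        client_id="<application client id>",                   # still required by the constructor
        username="<concur user name>",                         # still required by the constructor
        connection_properties={"region": "<concur region>"},   # hypothetical key
    )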
@@ -9004,7 +9633,7 @@ def __init__( **kwargs ): super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Container' + self.type = 'Container' # type: str class CopyActivity(ExecutionActivity): @@ -9058,9 +9687,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. @@ -9100,6 +9731,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -9128,6 +9760,7 @@ def __init__( enable_skip_incompatible_row: Optional[object] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["LogStorageSettings"] = None, + log_settings: Optional["LogSettings"] = None, preserve_rules: Optional[List[object]] = None, preserve: Optional[List[object]] = None, validate_data_consistency: Optional[object] = None, @@ -9135,7 +9768,7 @@ def __init__( **kwargs ): super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Copy' + self.type = 'Copy' # type: str self.inputs = inputs self.outputs = outputs self.source = source @@ -9148,12 +9781,41 @@ def __init__( self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency self.skip_error_file = skip_error_file +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + *, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -9190,7 +9852,7 @@ def __init__( ): super(CopyTranslator, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CopyTranslator' + self.type = 'CopyTranslator' # type: str class CosmosDBLinkedService(LinkedService): @@ -9262,7 +9924,7 @@ def __init__( **kwargs ): super(CosmosDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDb' + self.type = 'CosmosDb' # type: str self.connection_string = connection_string self.account_endpoint = account_endpoint self.database = database @@ -9336,7 +9998,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbMongoDbApiCollection' + self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = collection @@ -9397,7 +10059,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDbMongoDbApi' + self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = connection_string self.database = database @@ -9461,7 +10123,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CosmosDbMongoDbApiSink' + self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -9535,7 +10197,7 @@ def __init__( **kwargs ): super(CosmosDBMongoDBApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CosmosDbMongoDbApiSource' + self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size @@ -9543,7 +10205,7 @@ def __init__( self.additional_columns = additional_columns -class CosmosDBSqlApiCollectionDataset(Dataset): +class CosmosDBSQLApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. 
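A minimal sketch of the CopyActivityLogSettings model introduced above; the resulting object is intended to feed the new log_settings property on CopyActivity (via the LogSettings model, whose own constructor sits outside this section of the diff):

    from data_factory_management_client.models import CopyActivityLogSettings

    copy_log = CopyActivityLogSettings(
        log_level="Warning",            # "Info" or "Warning" per the docstring
        enable_reliable_logging=True,
    )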
@@ -9608,12 +10270,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CosmosDBSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbSqlApiCollection' + super(CosmosDBSQLApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = collection_name -class CosmosDBSqlApiSink(CopySink): +class CosmosDBSQLApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -9670,12 +10332,12 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDBSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CosmosDbSqlApiSink' + super(CosmosDBSQLApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior -class CosmosDBSqlApiSource(CopySource): +class CosmosDBSQLApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -9702,6 +10364,9 @@ class CosmosDBSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). + :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -9720,6 +10385,7 @@ class CosmosDBSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -9733,14 +10399,16 @@ def __init__( query: Optional[object] = None, page_size: Optional[object] = None, preferred_regions: Optional[object] = None, + detect_datetime: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDBSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'CosmosDbSqlApiSource' + super(CosmosDBSQLApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size self.preferred_regions = preferred_regions + self.detect_datetime = detect_datetime self.additional_columns = additional_columns @@ -9803,7 +10471,7 @@ def __init__( **kwargs ): super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Couchbase' + self.type = 'Couchbase' # type: str self.connection_string = connection_string self.cred_string = cred_string self.encrypted_credential = encrypted_credential @@ -9867,7 +10535,7 @@ def __init__( **kwargs ): super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'CouchbaseSource' + self.type = 'CouchbaseSource' # type: str self.query = query @@ -9935,7 +10603,7 @@ def __init__( **kwargs ): super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CouchbaseTable' + self.type = 'CouchbaseTable' # type: str self.table_name = table_name @@ -10157,7 +10825,7 @@ def __init__( **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Custom' + self.type = 'Custom' # type: str self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path @@ -10256,7 +10924,7 @@ def __init__( **kwargs ): super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 
'CustomDataset' + self.type = 'CustomDataset' # type: str self.type_properties = type_properties @@ -10309,7 +10977,7 @@ def __init__( **kwargs ): super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CustomDataSource' + self.type = 'CustomDataSource' # type: str self.type_properties = type_properties @@ -10382,7 +11050,7 @@ def __init__( **kwargs ): super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksNotebook' + self.type = 'DatabricksNotebook' # type: str self.notebook_path = notebook_path self.base_parameters = base_parameters self.libraries = libraries @@ -10456,7 +11124,7 @@ def __init__( **kwargs ): super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkJar' + self.type = 'DatabricksSparkJar' # type: str self.main_class_name = main_class_name self.parameters = parameters self.libraries = libraries @@ -10529,7 +11197,7 @@ def __init__( **kwargs ): super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkPython' + self.type = 'DatabricksSparkPython' # type: str self.python_file = python_file self.parameters = parameters self.libraries = libraries @@ -10572,7 +11240,7 @@ def __init__( **kwargs ): super(DataFlow, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] self.description = description self.annotations = annotations self.folder = folder @@ -10698,8 +11366,9 @@ class DataFlowDebugPackage(msrest.serialization.Model): :type parameters_debug_settings_parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. :type dataset_parameters: object - :param folder_path: Folder path for staging blob. - :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :ivar type: Linked service reference type. Default value: "LinkedServiceReference". :vartype type: str :param reference_name: Reference LinkedService name. 
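Hedged sketch for the renamed CosmosDBSQLApiSource and its new detect_datetime flag (added a few hunks above); all arguments are optional keyword-only parameters in the shown constructor:

    from data_factory_management_client.models import CosmosDBSQLApiSource

    source = CosmosDBSQLApiSource(
        query="select * from c",
        page_size=1000,
        detect_datetime=True,   # detect primitive values as datetime values
    )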
@@ -10724,7 +11393,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'str'}, + 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, @@ -10744,7 +11413,7 @@ def __init__( source_settings: Optional[List["DataFlowSourceSetting"]] = None, parameters_debug_settings_parameters: Optional[Dict[str, object]] = None, dataset_parameters: Optional[object] = None, - folder_path: Optional[str] = None, + folder_path: Optional[object] = None, reference_name: Optional[str] = None, parameters_staging_linked_service_parameters: Optional[Dict[str, object]] = None, name: Optional[str] = None, @@ -11228,8 +11897,9 @@ class DataFlowStagingInfo(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param folder_path: Folder path for staging blob. - :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :ivar type: Linked service reference type. Default value: "LinkedServiceReference". :vartype type: str :param reference_name: Reference LinkedService name. @@ -11243,7 +11913,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): } _attribute_map = { - 'folder_path': {'key': 'folderPath', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'type': {'key': 'linkedService.type', 'type': 'str'}, 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, @@ -11254,7 +11924,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): def __init__( self, *, - folder_path: Optional[str] = None, + folder_path: Optional[object] = None, reference_name: Optional[str] = None, parameters: Optional[Dict[str, object]] = None, **kwargs @@ -11354,7 +12024,7 @@ def __init__( **kwargs ): super(DataLakeAnalyticsUsqlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DataLakeAnalyticsU-SQL' + self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = script_path self.script_linked_service = script_linked_service self.degree_of_parallelism = degree_of_parallelism @@ -11368,7 +12038,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. 
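Sketch of the widened folder_path on DataFlowStagingInfo: now that it is typed as object rather than str, an ADF expression payload can be supplied as well as a literal string (the linked-service name below is hypothetical):

    from data_factory_management_client.models import DataFlowStagingInfo

    staging = DataFlowStagingInfo(
        reference_name="stagingBlobLinkedService",   # hypothetical linked service
        folder_path={
            "value": "@concat('staging/', pipeline().RunId)",
            "type": "Expression",
        },
    )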
@@ -11389,7 +12059,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -11400,7 +12070,7 @@ def __init__( ): super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'DatasetCompression' + self.type = 'DatasetCompression' # type: str class DatasetBZip2Compression(DatasetCompression): @@ -11431,7 +12101,7 @@ def __init__( **kwargs ): super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'BZip2' + self.type = 'BZip2' # type: str class DatasetDataElement(msrest.serialization.Model): @@ -11523,7 +12193,7 @@ def __init__( **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'Deflate' + self.type = 'Deflate' # type: str self.level = level @@ -11580,7 +12250,7 @@ def __init__( **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'GZip' + self.type = 'GZip' # type: str self.level = level @@ -11733,6 +12403,73 @@ def __init__( self.type = type +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZip' # type: str + self.level = level + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -11765,15 +12502,13 @@ def __init__( **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'ZipDeflate' + self.type = 'ZipDeflate' # type: str self.level = level class Db2LinkedService(LinkedService): """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -11799,9 +12534,9 @@ class Db2LinkedService(LinkedService): :param database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. It is mutually - exclusive with connectionString property. Default value: "Basic". - :vartype authentication_type: str + :param authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". + :type authentication_type: str or ~data_factory_management_client.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
:type username: object @@ -11822,7 +12557,6 @@ class Db2LinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'authentication_type': {'constant': True}, } _attribute_map = { @@ -11843,8 +12577,6 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, *, @@ -11856,6 +12588,7 @@ def __init__( connection_string: Optional[object] = None, server: Optional[object] = None, database: Optional[object] = None, + authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, package_collection: Optional[object] = None, @@ -11864,10 +12597,11 @@ def __init__( **kwargs ): super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Db2' + self.type = 'Db2' # type: str self.connection_string = connection_string self.server = server self.database = database + self.authentication_type = authentication_type self.username = username self.password = password self.package_collection = package_collection @@ -11932,7 +12666,7 @@ def __init__( **kwargs ): super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'Db2Source' + self.type = 'Db2Source' # type: str self.query = query @@ -12010,7 +12744,7 @@ def __init__( **kwargs ): super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Db2Table' + self.type = 'Db2Table' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -12099,7 +12833,7 @@ def __init__( **kwargs ): super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Delete' + self.type = 'Delete' # type: str self.recursive = recursive self.max_concurrent_connections = max_concurrent_connections self.enable_logging = enable_logging @@ -12170,7 +12904,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". 
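Hedged sketch for the new Tar archive support registered above: DatasetTarCompression and DatasetTarGZipCompression are constructed like the existing compression models and then attached as a file-based dataset's compression (for example on BinaryDataset):

    from data_factory_management_client.models import DatasetTarCompression, DatasetTarGZipCompression

    tar_gzip = DatasetTarGZipCompression(level="Optimal")   # "Optimal" or "Fastest"
    plain_tar = DatasetTarCompression()                     # no level for plain .tar archives
    # e.g. BinaryDataset(..., compression=tar_gzip)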
@@ -12238,7 +12972,7 @@ def __init__( **kwargs ): super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DelimitedText' + self.type = 'DelimitedText' # type: str self.location = location self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter @@ -12288,7 +13022,7 @@ def __init__( **kwargs ): super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'DelimitedTextReadSettings' + self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = skip_line_count self.compression_properties = compression_properties @@ -12354,7 +13088,7 @@ def __init__( **kwargs ): super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DelimitedTextSink' + self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -12415,7 +13149,7 @@ def __init__( **kwargs ): super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DelimitedTextSource' + self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns @@ -12437,6 +13171,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
+ :type file_name_prefix: object """ _validation = { @@ -12449,6 +13190,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -12457,12 +13200,16 @@ def __init__( file_extension: object, additional_properties: Optional[Dict[str, object]] = None, quote_all_text: Optional[object] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'DelimitedTextWriteSettings' + self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class DependencyReference(msrest.serialization.Model): @@ -12494,7 +13241,7 @@ def __init__( **kwargs ): super(DependencyReference, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] class DistcpSettings(msrest.serialization.Model): @@ -12605,7 +13352,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DocumentDbCollection' + self.type = 'DocumentDbCollection' # type: str self.collection_name = collection_name @@ -12672,7 +13419,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DocumentDbCollectionSink' + self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -12739,7 +13486,7 @@ def __init__( **kwargs ): super(DocumentDBCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DocumentDbCollectionSource' + self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator self.query_timeout = query_timeout @@ -12805,7 +13552,7 @@ def __init__( **kwargs ): super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Drill' + self.type = 'Drill' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -12869,7 +13616,7 @@ def __init__( **kwargs ): super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, 
additional_columns=additional_columns, **kwargs) - self.type: str = 'DrillSource' + self.type = 'DrillSource' # type: str self.query = query @@ -12947,7 +13694,7 @@ def __init__( **kwargs ): super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DrillTable' + self.type = 'DrillTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -13093,7 +13840,7 @@ def __init__( **kwargs ): super(DynamicsAxLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsAX' + self.type = 'DynamicsAX' # type: str self.url = url self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -13168,7 +13915,7 @@ def __init__( **kwargs ): super(DynamicsAxResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsAXResource' + self.type = 'DynamicsAXResource' # type: str self.path = path @@ -13237,7 +13984,7 @@ def __init__( **kwargs ): super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'DynamicsAXSource' + self.type = 'DynamicsAXSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -13307,7 +14054,7 @@ def __init__( **kwargs ): super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsCrmEntity' + self.type = 'DynamicsCrmEntity' # type: str self.entity_name = entity_name @@ -13432,7 +14179,7 @@ def __init__( **kwargs ): super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsCrm' + self.type = 'DynamicsCrm' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -13450,8 +14197,6 @@ def __init__( class DynamicsCrmSink(CopySink): """A copy activity Dynamics CRM sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -13474,8 +14219,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert". 
- :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -13487,7 +14233,7 @@ class DynamicsCrmSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -13503,11 +14249,10 @@ class DynamicsCrmSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], additional_properties: Optional[Dict[str, object]] = None, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, @@ -13519,7 +14264,8 @@ def __init__( **kwargs ): super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DynamicsCrmSink' + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -13577,7 +14323,7 @@ def __init__( **kwargs ): super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DynamicsCrmSource' + self.type = 'DynamicsCrmSource' # type: str self.query = query self.additional_columns = additional_columns @@ -13647,7 +14393,7 @@ def __init__( **kwargs ): super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsEntity' + self.type = 'DynamicsEntity' # type: str self.entity_name = entity_name @@ -13769,7 +14515,7 @@ def __init__( **kwargs ): super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Dynamics' + self.type = 'Dynamics' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -13787,8 +14533,6 @@ def __init__( class DynamicsSink(CopySink): """A copy activity Dynamics sink. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -13811,8 +14555,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :ivar write_behavior: Required. The write behavior for the operation. 
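Note the behavioral change in the DynamicsCrmSink hunk above (and in the DynamicsSink hunk that follows): write_behavior is no longer a server-filled constant pinned to "Upsert" but a required constructor argument typed as str or DynamicsSinkWriteBehavior. A hedged sketch of the new call shape, with the import path assumed from the docstring references:

# Assumed import path; adjust to the vendored SDK location when used inside the extension.
from data_factory_management_client.models import DynamicsCrmSink

sink = DynamicsCrmSink(
    write_behavior="Upsert",     # must now be passed explicitly; "Upsert" is the only documented value
    ignore_null_values=False,    # existing optional setting, shown for context
)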
Default value: "Upsert". - :vartype write_behavior: str + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -13824,7 +14569,7 @@ class DynamicsSink(CopySink): _validation = { 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, + 'write_behavior': {'required': True}, } _attribute_map = { @@ -13840,11 +14585,10 @@ class DynamicsSink(CopySink): 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } - write_behavior = "Upsert" - def __init__( self, *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], additional_properties: Optional[Dict[str, object]] = None, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, @@ -13856,7 +14600,8 @@ def __init__( **kwargs ): super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DynamicsSink' + self.type = 'DynamicsSink' # type: str + self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -13914,7 +14659,7 @@ def __init__( **kwargs ): super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'DynamicsSource' + self.type = 'DynamicsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -14000,7 +14745,7 @@ def __init__( **kwargs ): super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Eloqua' + self.type = 'Eloqua' # type: str self.endpoint = endpoint self.username = username self.password = password @@ -14074,7 +14819,7 @@ def __init__( **kwargs ): super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'EloquaObject' + self.type = 'EloquaObject' # type: str self.table_name = table_name @@ -14136,7 +14881,7 @@ def __init__( **kwargs ): super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'EloquaSource' + self.type = 'EloquaSource' # type: str self.query = query @@ -14200,7 +14945,7 @@ def __init__( **kwargs ): super(EnvironmentVariableSetup, self).__init__(**kwargs) - self.type: str = 'EnvironmentVariableSetup' + self.type = 'EnvironmentVariableSetup' # type: str self.variable_name = variable_name self.variable_value = variable_value @@ 
-14293,7 +15038,7 @@ def __init__( **kwargs ): super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Excel' + self.type = 'Excel' # type: str self.location = location self.sheet_name = sheet_name self.range = range @@ -14354,7 +15099,7 @@ def __init__( **kwargs ): super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'ExcelSource' + self.type = 'ExcelSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -14430,7 +15175,7 @@ def __init__( **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteDataFlow' + self.type = 'ExecuteDataFlow' # type: str self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime @@ -14524,7 +15269,7 @@ def __init__( **kwargs ): super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ExecutePipeline' + self.type = 'ExecutePipeline' # type: str self.pipeline = pipeline self.parameters = parameters self.wait_on_completion = wait_on_completion @@ -14641,7 +15386,7 @@ def __init__( **kwargs ): super(ExecuteSsisPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteSSISPackage' + self.type = 'ExecuteSSISPackage' # type: str self.package_location = package_location self.runtime = runtime self.logging_level = logging_level @@ -14656,43 +15401,60 @@ def __init__( self.log_location = log_location -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. +class ExposureControlBatchRequest(msrest.serialization.Model): + """A list of exposure control features. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str + :param exposure_control_requests: Required. List of exposure control features. 
+ :type exposure_control_requests: + list[~data_factory_management_client.models.ExposureControlRequest] """ _validation = { - 'type': {'required': True}, + 'exposure_control_requests': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'exposure_control_requests': {'key': 'exposureControlRequests', 'type': '[ExposureControlRequest]'}, } - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + def __init__( + self, + *, + exposure_control_requests: List["ExposureControlRequest"], + **kwargs + ): + super(ExposureControlBatchRequest, self).__init__(**kwargs) + self.exposure_control_requests = exposure_control_requests + + +class ExposureControlBatchResponse(msrest.serialization.Model): + """A list of exposure control feature values. + + All required parameters must be populated in order to send to Azure. + + :param exposure_control_responses: Required. List of exposure control feature values. + :type exposure_control_responses: + list[~data_factory_management_client.models.ExposureControlResponse] + """ + + _validation = { + 'exposure_control_responses': {'required': True}, + } + + _attribute_map = { + 'exposure_control_responses': {'key': 'exposureControlResponses', 'type': '[ExposureControlResponse]'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + exposure_control_responses: List["ExposureControlResponse"], **kwargs ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'ExportSettings' + super(ExposureControlBatchResponse, self).__init__(**kwargs) + self.exposure_control_responses = exposure_control_responses class ExposureControlRequest(msrest.serialization.Model): @@ -14870,6 +15632,9 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". 
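Two additions meet here: the new ExposureControlBatchRequest/ExposureControlBatchResponse models just above, and the public_network_access property documented on Factory, whose keyword argument is added in the Factory.__init__ hunk that follows. A rough construction sketch, assuming the import namespace used by these docstrings; the individual ExposureControlRequest items are prepared elsewhere by the caller:

# Assumed import path; adjust to the vendored SDK location when used inside the extension.
from data_factory_management_client.models import ExposureControlBatchRequest, Factory

# `feature_requests` is a list of ExposureControlRequest objects built by the caller.
batch = ExposureControlBatchRequest(exposure_control_requests=feature_requests)

factory = Factory(
    location="East US",
    public_network_access="Disabled",  # new optional setting; "Enabled" or "Disabled"
)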
+ :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -14896,6 +15661,7 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -14907,6 +15673,7 @@ def __init__( identity: Optional["FactoryIdentity"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): super(Factory, self).__init__(location=location, tags=tags, **kwargs) @@ -14917,6 +15684,7 @@ def __init__( self.version = None self.repo_configuration = repo_configuration self.global_parameters = global_parameters + self.public_network_access = public_network_access class FactoryRepoConfiguration(msrest.serialization.Model): @@ -14973,7 +15741,7 @@ def __init__( **kwargs ): super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] self.account_name = account_name self.repository_name = repository_name self.collaboration_branch = collaboration_branch @@ -15032,7 +15800,7 @@ def __init__( **kwargs ): super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type: str = 'FactoryGitHubConfiguration' + self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = host_name @@ -15215,7 +15983,7 @@ def __init__( **kwargs ): super(FactoryVstsConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type: str = 'FactoryVSTSConfiguration' + self.type = 'FactoryVSTSConfiguration' # type: str self.project_name = project_name self.tenant_id = tenant_id @@ -15285,7 +16053,7 @@ def __init__( **kwargs ): super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FileServer' + self.type = 'FileServer' # type: str self.host = host self.user_id = user_id self.password = password @@ -15330,7 +16098,7 @@ def __init__( **kwargs ): super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'FileServerLocation' + self.type = 'FileServerLocation' # type: str class FileServerReadSettings(StoreReadSettings): @@ -15416,7 +16184,7 @@ def __init__( **kwargs ): super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'FileServerReadSettings' + self.type = 'FileServerReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -15466,7 +16234,7 @@ def __init__( **kwargs ): super(FileServerWriteSettings, 
self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type: str = 'FileServerWriteSettings' + self.type = 'FileServerWriteSettings' # type: str class FileShareDataset(Dataset): @@ -15562,7 +16330,7 @@ def __init__( **kwargs ): super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'FileShare' + self.type = 'FileShare' # type: str self.folder_path = folder_path self.file_name = file_name self.modified_datetime_start = modified_datetime_start @@ -15629,7 +16397,7 @@ def __init__( **kwargs ): super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'FileSystemSink' + self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -15686,7 +16454,7 @@ def __init__( **kwargs ): super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'FileSystemSource' + self.type = 'FileSystemSource' # type: str self.recursive = recursive self.additional_columns = additional_columns @@ -15746,7 +16514,7 @@ def __init__( **kwargs ): super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Filter' + self.type = 'Filter' # type: str self.items = items self.condition = condition @@ -15816,7 +16584,7 @@ def __init__( **kwargs ): super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ForEach' + self.type = 'ForEach' # type: str self.is_sequential = is_sequential self.batch_count = batch_count self.items = items @@ -15895,7 +16663,7 @@ def __init__( **kwargs ): super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'FtpReadSettings' + self.type = 'FtpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -15992,7 +16760,7 @@ def __init__( **kwargs ): super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FtpServer' + self.type = 'FtpServer' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -16041,7 +16809,7 @@ def __init__( **kwargs ): super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'FtpServerLocation' + self.type = 'FtpServerLocation' # type: str class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): @@ -16141,7 
+16909,7 @@ def __init__( **kwargs ): super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'GetMetadata' + self.type = 'GetMetadata' # type: str self.dataset = dataset self.field_list = field_list self.store_settings = store_settings @@ -16368,7 +17136,7 @@ def __init__( **kwargs ): super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleAdWords' + self.type = 'GoogleAdWords' # type: str self.client_customer_id = client_customer_id self.developer_token = developer_token self.authentication_type = authentication_type @@ -16446,7 +17214,7 @@ def __init__( **kwargs ): super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleAdWordsObject' + self.type = 'GoogleAdWordsObject' # type: str self.table_name = table_name @@ -16508,7 +17276,7 @@ def __init__( **kwargs ): super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'GoogleAdWordsSource' + self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -16621,7 +17389,7 @@ def __init__( **kwargs ): super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleBigQuery' + self.type = 'GoogleBigQuery' # type: str self.project = project self.additional_projects = additional_projects self.request_google_drive_scope = request_google_drive_scope @@ -16711,7 +17479,7 @@ def __init__( **kwargs ): super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleBigQueryObject' + self.type = 'GoogleBigQueryObject' # type: str self.table_name = table_name self.table = table self.dataset = dataset @@ -16775,7 +17543,7 @@ def __init__( **kwargs ): super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'GoogleBigQuerySource' + self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -16846,7 +17614,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleCloudStorage' + self.type = 'GoogleCloudStorage' # type: str 
self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -16901,7 +17669,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'GoogleCloudStorageLocation' + self.type = 'GoogleCloudStorageLocation' # type: str self.bucket_name = bucket_name self.version = version @@ -16989,7 +17757,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'GoogleCloudStorageReadSettings' + self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -17061,7 +17829,7 @@ def __init__( **kwargs ): super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Greenplum' + self.type = 'Greenplum' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -17125,7 +17893,7 @@ def __init__( **kwargs ): super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'GreenplumSource' + self.type = 'GreenplumSource' # type: str self.query = query @@ -17203,7 +17971,7 @@ def __init__( **kwargs ): super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GreenplumTable' + self.type = 'GreenplumTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -17310,7 +18078,7 @@ def __init__( **kwargs ): super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HBase' + self.type = 'HBase' # type: str self.host = host self.port = port self.http_path = http_path @@ -17388,7 +18156,7 @@ def __init__( **kwargs ): super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HBaseObject' + self.type = 'HBaseObject' # type: str self.table_name = table_name @@ -17450,7 +18218,7 @@ def __init__( **kwargs ): super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'HBaseSource' + self.type = 'HBaseSource' # type: str self.query = query @@ -17524,7 +18292,7 @@ def __init__( **kwargs ): 
super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hdfs' + self.type = 'Hdfs' # type: str self.url = url self.authentication_type = authentication_type self.encrypted_credential = encrypted_credential @@ -17570,7 +18338,7 @@ def __init__( **kwargs ): super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'HdfsLocation' + self.type = 'HdfsLocation' # type: str class HdfsReadSettings(StoreReadSettings): @@ -17612,6 +18380,9 @@ class HdfsReadSettings(StoreReadSettings): :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object """ _validation = { @@ -17631,6 +18402,7 @@ class HdfsReadSettings(StoreReadSettings): 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, } def __init__( @@ -17647,10 +18419,11 @@ def __init__( modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, distcp_settings: Optional["DistcpSettings"] = None, + delete_files_after_completion: Optional[object] = None, **kwargs ): super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'HdfsReadSettings' + self.type = 'HdfsReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -17660,6 +18433,7 @@ def __init__( self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.distcp_settings = distcp_settings + self.delete_files_after_completion = delete_files_after_completion class HdfsSource(CopySource): @@ -17714,7 +18488,7 @@ def __init__( **kwargs ): super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'HdfsSource' + self.type = 'HdfsSource' # type: str self.recursive = recursive self.distcp_settings = distcp_settings @@ -17807,7 +18581,7 @@ def __init__( **kwargs ): super(HdInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightHive' + self.type = 'HDInsightHive' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -17903,7 +18677,7 @@ def __init__( **kwargs ): super(HdInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsight' + self.type = 'HDInsight' # type: str self.cluster_uri = cluster_uri self.user_name = user_name self.password = password @@ -18003,7 +18777,7 @@ def __init__( **kwargs ): super(HdInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightMapReduce' + self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -18232,7 +19006,7 @@ def __init__( **kwargs ): super(HdInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsightOnDemand' + self.type = 'HDInsightOnDemand' # type: str self.cluster_size = cluster_size self.time_to_live = time_to_live self.version = version @@ -18348,7 +19122,7 @@ def __init__( **kwargs ): super(HdInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightPig' + self.type = 'HDInsightPig' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -18449,7 +19223,7 @@ def __init__( **kwargs ): super(HdInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightSpark' + self.type = 'HDInsightSpark' # type: str self.root_path = root_path self.entry_file_path = entry_file_path self.arguments = arguments @@ -18570,7 +19344,7 @@ def __init__( **kwargs ): super(HdInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightStreaming' + self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -18716,7 +19490,7 @@ def __init__( **kwargs ): super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hive' + self.type = 'Hive' # type: str self.host = host self.port = port self.server_type = server_type @@ -18810,7 +19584,7 @@ def __init__( **kwargs ): super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HiveObject' + self.type = 'HiveObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -18874,7 +19648,7 @@ def __init__( **kwargs ): super(HiveSource, 
self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'HiveSource' + self.type = 'HiveSource' # type: str self.query = query @@ -18969,7 +19743,7 @@ def __init__( **kwargs ): super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HttpFile' + self.type = 'HttpFile' # type: str self.relative_url = relative_url self.request_method = request_method self.request_body = request_body @@ -19068,7 +19842,7 @@ def __init__( **kwargs ): super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HttpServer' + self.type = 'HttpServer' # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name @@ -19141,7 +19915,7 @@ def __init__( **kwargs ): super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'HttpReadSettings' + self.type = 'HttpReadSettings' # type: str self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers @@ -19193,7 +19967,7 @@ def __init__( **kwargs ): super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'HttpServerLocation' + self.type = 'HttpServerLocation' # type: str self.relative_url = relative_url @@ -19247,7 +20021,7 @@ def __init__( **kwargs ): super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'HttpSource' + self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -19336,7 +20110,7 @@ def __init__( **kwargs ): super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hubspot' + self.type = 'Hubspot' # type: str self.client_id = client_id self.client_secret = client_secret self.access_token = access_token @@ -19411,7 +20185,7 @@ def __init__( **kwargs ): super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HubspotObject' + self.type = 'HubspotObject' # type: str self.table_name = table_name @@ -19473,7 +20247,7 @@ def __init__( **kwargs ): super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'HubspotSource' + self.type = 
'HubspotSource' # type: str self.query = query @@ -19539,7 +20313,7 @@ def __init__( **kwargs ): super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'IfCondition' + self.type = 'IfCondition' # type: str self.expression = expression self.if_true_activities = if_true_activities self.if_false_activities = if_false_activities @@ -19648,7 +20422,7 @@ def __init__( **kwargs ): super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Impala' + self.type = 'Impala' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -19737,7 +20511,7 @@ def __init__( **kwargs ): super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ImpalaObject' + self.type = 'ImpalaObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -19801,49 +20575,10 @@ def __init__( **kwargs ): super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'ImpalaSource' + self.type = 'ImpalaSource' # type: str self.query = query -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'ImportSettings' - - class InformixLinkedService(LinkedService): """Informix linked service. 
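One pattern worth calling out, since it accounts for most of the one-line edits in this file: `self.type: str = '...'` becomes `self.type = '...'  # type: str`. The two spellings carry the same information for a type checker; the comment form is presumably preferred because PEP 526 variable annotations are a syntax error on interpreters older than Python 3.6, while type comments parse everywhere. A tiny illustration with hypothetical classes:

class AnnotatedExample(object):
    def __init__(self, **kwargs):
        self.type: str = 'Example'          # old style: PEP 526 annotation (Python 3.6+ only)

class TypeCommentExample(object):
    def __init__(self, **kwargs):
        self.type = 'Example'  # type: str  # new style: type comment, accepted by all interpreters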
@@ -19921,7 +20656,7 @@ def __init__( **kwargs ): super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Informix' + self.type = 'Informix' # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -19988,7 +20723,7 @@ def __init__( **kwargs ): super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'InformixSink' + self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -20049,7 +20784,7 @@ def __init__( **kwargs ): super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'InformixSource' + self.type = 'InformixSource' # type: str self.query = query @@ -20118,7 +20853,7 @@ def __init__( **kwargs ): super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'InformixTable' + self.type = 'InformixTable' # type: str self.table_name = table_name @@ -20163,7 +20898,7 @@ def __init__( ): super(IntegrationRuntime, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'IntegrationRuntime' + self.type = 'IntegrationRuntime' # type: str self.description = description @@ -20870,7 +21605,7 @@ def __init__( ): super(IntegrationRuntimeStatus, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'IntegrationRuntimeStatus' + self.type = 'IntegrationRuntimeStatus' # type: str self.data_factory_name = None self.state = None @@ -21066,7 +21801,7 @@ def __init__( **kwargs ): super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Jira' + self.type = 'Jira' # type: str self.host = host self.port = port self.username = username @@ -21141,7 +21876,7 @@ def __init__( **kwargs ): super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'JiraObject' + self.type = 'JiraObject' # type: str self.table_name = table_name @@ -21203,7 +21938,7 @@ def __init__( **kwargs ): super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'JiraSource' + self.type = 'JiraSource' # type: str self.query = query @@ -21283,7 +22018,7 @@ def __init__( **kwargs ): 
super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Json' + self.type = 'Json' # type: str self.location = location self.encoding_name = encoding_name self.compression = compression @@ -21357,7 +22092,7 @@ def __init__( **kwargs ): super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type: str = 'JsonFormat' + self.type = 'JsonFormat' # type: str self.file_pattern = file_pattern self.nesting_separator = nesting_separator self.encoding_name = encoding_name @@ -21397,7 +22132,7 @@ def __init__( **kwargs ): super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'JsonReadSettings' + self.type = 'JsonReadSettings' # type: str self.compression_properties = compression_properties @@ -21462,7 +22197,7 @@ def __init__( **kwargs ): super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'JsonSink' + self.type = 'JsonSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -21523,7 +22258,7 @@ def __init__( **kwargs ): super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'JsonSource' + self.type = 'JsonSource' # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns @@ -21563,7 +22298,7 @@ def __init__( **kwargs ): super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'JsonWriteSettings' + self.type = 'JsonWriteSettings' # type: str self.file_pattern = file_pattern @@ -21644,7 +22379,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type: Optional[str] = None + self.authorization_type = None # type: Optional[str] class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): @@ -21676,7 +22411,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.authorization_type: str = 'Key' + self.authorization_type = 'Key' # type: str self.key = key @@ -21709,7 +22444,7 @@ def __init__( **kwargs ): super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.authorization_type: str = 'RBAC' + self.authorization_type = 'RBAC' # type: str self.resource_id = resource_id @@ -21888,8 +22623,81 @@ def __init__( self.properties = properties +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: Optional[object] = None, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + *, + log_location_settings: "LogLocationSettings", + enable_copy_activity_log: Optional[object] = None, + copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -21901,6 +22709,12 @@ class LogStorageSettings(msrest.serialization.Model): :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). 
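The hunks above introduce LogLocationSettings and LogSettings, and the LogStorageSettings hunk in progress here marks the older class as deprecated in favor of LogSettings while adding log_level and enable_reliable_logging. A minimal sketch of the new shape, assuming the docstring import namespace and a LinkedServiceReference, built elsewhere, that points at the storage account receiving the logs:

# Assumed import path; adjust to the vendored SDK location when used inside the extension.
from data_factory_management_client.models import LogLocationSettings, LogSettings

# `log_store_ref` is a LinkedServiceReference to the log storage account, constructed by the caller.
log_settings = LogSettings(
    enable_copy_activity_log=True,
    log_location_settings=LogLocationSettings(
        linked_service_name=log_store_ref,
        path="copyactivitylogs",   # optional folder path inside the log store
    ),
)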
+ :type enable_reliable_logging: object """ _validation = { @@ -21911,6 +22725,8 @@ class LogStorageSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, } def __init__( @@ -21919,12 +22735,16 @@ def __init__( linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, path: Optional[object] = None, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, **kwargs ): super(LogStorageSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging class LookupActivity(ExecutionActivity): @@ -21995,7 +22815,7 @@ def __init__( **kwargs ): super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Lookup' + self.type = 'Lookup' # type: str self.source = source self.dataset = dataset self.first_row_only = first_row_only @@ -22076,7 +22896,7 @@ def __init__( **kwargs ): super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Magento' + self.type = 'Magento' # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -22149,7 +22969,7 @@ def __init__( **kwargs ): super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MagentoObject' + self.type = 'MagentoObject' # type: str self.table_name = table_name @@ -22211,7 +23031,7 @@ def __init__( **kwargs ): super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'MagentoSource' + self.type = 'MagentoSource' # type: str self.query = query @@ -22265,7 +23085,7 @@ def __init__( **kwargs ): super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.type: str = 'Managed' + self.type = 'Managed' # type: str self.state = None self.compute_properties = compute_properties self.ssis_properties = ssis_properties @@ -22478,13 +23298,285 @@ def __init__( **kwargs ): super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'Managed' + self.type = 'Managed' # type: str self.create_time = None self.nodes = None self.other_errors = None self.last_operation = None +class ManagedPrivateEndpoint(msrest.serialization.Model): + """Properties of a managed private endpoint. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :ivar is_reserved: Denotes whether the managed private endpoint is reserved. + :vartype is_reserved: bool + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :type private_link_resource_id: str + :ivar provisioning_state: The managed private endpoint provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'is_reserved': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connection_state': {'key': 'connectionState', 'type': 'ConnectionStateProperties'}, + 'fqdns': {'key': 'fqdns', 'type': '[str]'}, + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'is_reserved': {'key': 'isReserved', 'type': 'bool'}, + 'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connection_state: Optional["ConnectionStateProperties"] = None, + fqdns: Optional[List[str]] = None, + group_id: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + **kwargs + ): + super(ManagedPrivateEndpoint, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.connection_state = connection_state + self.fqdns = fqdns + self.group_id = group_id + self.is_reserved = None + self.private_link_resource_id = private_link_resource_id + self.provisioning_state = None + + +class ManagedPrivateEndpointListResponse(msrest.serialization.Model): + """A list of managed private endpoint resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of managed private endpoints. + :type value: list[~data_factory_management_client.models.ManagedPrivateEndpointResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ManagedPrivateEndpointResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["ManagedPrivateEndpointResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ManagedPrivateEndpointResource(SubResource): + """Managed private endpoint resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. 
+ :vartype etag: str + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :ivar is_reserved: Denotes whether the managed private endpoint is reserved. + :vartype is_reserved: bool + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :type private_link_resource_id: str + :ivar provisioning_state: The managed private endpoint provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'is_reserved': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, + 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + connection_state: Optional["ConnectionStateProperties"] = None, + fqdns: Optional[List[str]] = None, + group_id: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + **kwargs + ): + super(ManagedPrivateEndpointResource, self).__init__(**kwargs) + self.connection_state = connection_state + self.fqdns = fqdns + self.group_id = group_id + self.is_reserved = None + self.private_link_resource_id = private_link_resource_id + self.provisioning_state = None + + +class ManagedVirtualNetwork(msrest.serialization.Model): + """A managed Virtual Network associated with the Azure Data Factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar v_net_id: Managed Virtual Network ID. + :vartype v_net_id: str + :ivar alias: Managed Virtual Network alias. + :vartype alias: str + """ + + _validation = { + 'v_net_id': {'readonly': True}, + 'alias': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'alias': {'key': 'alias', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ManagedVirtualNetwork, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = None + self.alias = None + + +class ManagedVirtualNetworkListResponse(msrest.serialization.Model): + """A list of managed Virtual Network resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of managed Virtual Networks. 
+ :type value: list[~data_factory_management_client.models.ManagedVirtualNetworkResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ManagedVirtualNetworkResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["ManagedVirtualNetworkResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ManagedVirtualNetworkResource(SubResource): + """Managed Virtual Network resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Managed Virtual Network properties. + :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedVirtualNetwork'}, + } + + def __init__( + self, + *, + properties: "ManagedVirtualNetwork", + **kwargs + ): + super(ManagedVirtualNetworkResource, self).__init__(**kwargs) + self.properties = properties + + class MappingDataFlow(DataFlow): """Mapping data flow. 
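The hunks above add the new LogSettings/LogLocationSettings pair (with LogStorageSettings now flagged as deprecated) together with the managed Virtual Network and managed private endpoint model classes. What follows is a minimal, illustrative sketch of how these generated models might be constructed; it is not part of the diff, and it assumes the models are importable under the data_factory_management_client namespace that the docstrings above reference (the vendored module path inside the extension may differ).

from data_factory_management_client import models

# Managed Virtual Network: vNetId and alias are read-only and are only
# populated by the service, so the payload built client-side is empty.
vnet = models.ManagedVirtualNetworkResource(
    properties=models.ManagedVirtualNetwork()
)

# Managed private endpoint targeting an ARM resource; isReserved and
# provisioningState are likewise server-populated (left as None here).
endpoint = models.ManagedPrivateEndpoint(
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<resource-group>/"
        "providers/Microsoft.Storage/storageAccounts/<account-name>"
    ),
)

assert endpoint.is_reserved is None and endpoint.provisioning_state is None
# msrest models expose serialize(), which maps the snake_case attributes to
# the wire keys declared in _attribute_map (e.g. privateLinkResourceId).
print(vnet.serialize())
print(endpoint.serialize())

LogSettings follows the same pattern: log_location_settings is its only required field and wraps a LinkedServiceReference pointing at the storage linked service that receives the copy-activity logs.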
@@ -22531,7 +23623,7 @@ def __init__( **kwargs ): super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MappingDataFlow' + self.type = 'MappingDataFlow' # type: str self.sources = sources self.sinks = sinks self.transformations = transformations @@ -22597,7 +23689,7 @@ def __init__( **kwargs ): super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MariaDB' + self.type = 'MariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -22661,7 +23753,7 @@ def __init__( **kwargs ): super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'MariaDBSource' + self.type = 'MariaDBSource' # type: str self.query = query @@ -22729,7 +23821,7 @@ def __init__( **kwargs ): super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MariaDBTable' + self.type = 'MariaDBTable' # type: str self.table_name = table_name @@ -22813,7 +23905,7 @@ def __init__( **kwargs ): super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Marketo' + self.type = 'Marketo' # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -22850,13 +23942,419 @@ class MarketoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MarketoObject' # type: str + self.table_name = table_name + + +class MarketoSource(TabularSource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + query: Optional[object] = None, + **kwargs + ): + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'MarketoSource' # type: str + self.query = query + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~data_factory_management_client.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~data_factory_management_client.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MicrosoftAccess' # type: str + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = pre_copy_script + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = query + self.additional_columns = additional_columns + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = table_name + + +class MongoDBAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -22869,13 +24367,14 @@ class MarketoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -22883,78 +24382,15 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - **kwargs - ): - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MarketoObject' - self.table_name = table_name - - -class MarketoSource(TabularSource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'MarketoSource' - self.query = query + super(MongoDBAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = collection -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. +class MongoDBAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. All required parameters must be populated in order to send to Azure. @@ -22971,31 +24407,19 @@ class MicrosoftAccessLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Microsoft Access as - ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with - resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { @@ -23006,103 +24430,29 @@ class MicrosoftAccessLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, *, connection_string: object, + database: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MicrosoftAccess' + super(MongoDBAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDbAtlas' # type: str self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, - **kwargs - ): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'MicrosoftAccessSink' - self.pre_copy_script = pre_copy_script + self.database = database -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. +class MongoDBAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. All required parameters must be populated in order to send to Azure. @@ -23120,8 +24470,20 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -23137,7 +24499,10 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -23148,85 +24513,22 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'MicrosoftAccessSource' - self.query = query + super(MongoDBAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout self.additional_columns = additional_columns -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - **kwargs - ): - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MicrosoftAccessTable' - self.table_name = table_name - - class MongoDBCollectionDataset(Dataset): """The MongoDB database dataset. @@ -23293,7 +24595,7 @@ def __init__( **kwargs ): super(MongoDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbCollection' + self.type = 'MongoDbCollection' # type: str self.collection_name = collection_name @@ -23443,7 +24745,7 @@ def __init__( **kwargs ): super(MongoDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDb' + self.type = 'MongoDb' # type: str self.server = server self.authentication_type = authentication_type self.database_name = database_name @@ -23509,7 +24811,7 @@ def __init__( **kwargs ): super(MongoDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'MongoDbSource' + self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns @@ -23580,7 +24882,7 @@ def __init__( **kwargs ): super(MongoDBV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbV2Collection' + self.type = 'MongoDbV2Collection' # type: str self.collection = collection @@ -23640,7 +24942,7 @@ def __init__( **kwargs ): super(MongoDBV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDbV2' + self.type = 'MongoDbV2' # type: str self.connection_string = connection_string self.database = database @@ -23715,7 +25017,7 @@ def __init__( **kwargs ): super(MongoDBV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'MongoDbV2Source' + self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size @@ -23723,7 +25025,7 @@ def __init__( self.additional_columns = additional_columns -class MySqlLinkedService(LinkedService): +class MySQLLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -23781,14 +25083,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MySql' + super(MySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class MySqlSource(TabularSource): +class MySQLSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -23844,12 +25146,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'MySqlSource' + super(MySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'MySqlSource' # type: str self.query = query -class MySqlTableDataset(Dataset): +class MySQLTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. 
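The preceding hunks introduce the MongoDB Atlas connector models (the MongoDbAtlasCollection dataset, MongoDbAtlas linked service, and MongoDbAtlasSource copy source) and rename the MySql* model classes to MySQL*; the serialized discriminators ('MySql', 'MySqlSource', 'MySqlTable') are unchanged, so only Python-level references are affected. Below is a hedged, illustrative sketch of the new Atlas models, again assuming the data_factory_management_client.models namespace referenced in the docstrings; it is not part of the generated code.

from data_factory_management_client import models

# Linked service: both connectionString and database are required.
atlas_ls = models.MongoDBAtlasLinkedService(
    connection_string="mongodb+srv://<user>:<password>@<cluster>.mongodb.net",
    database="exampleDatabase",
)

# Copy source: filter, cursorMethods, batchSize and queryTimeout mirror the
# shape of the existing MongoDbV2Source.
atlas_source = models.MongoDBAtlasSource(
    filter='{"status": "active"}',
    batch_size=100,
)

# The type discriminators are constants filled in by the generated __init__,
# not supplied by the caller.
assert atlas_ls.type == "MongoDbAtlas"
assert atlas_source.type == "MongoDbAtlasSource"

A MongoDBAtlasCollectionDataset additionally requires linked_service_name (a LinkedServiceReference) and the required collection name, mirroring the MongoDbV2 collection dataset.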
@@ -23912,8 +25214,8 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MySqlTable' + super(MySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MySqlTable' # type: str self.table_name = table_name @@ -23976,7 +25278,7 @@ def __init__( **kwargs ): super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Netezza' + self.type = 'Netezza' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -24085,7 +25387,7 @@ def __init__( **kwargs ): super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'NetezzaSource' + self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -24166,7 +25468,7 @@ def __init__( **kwargs ): super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'NetezzaTable' + self.type = 'NetezzaTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -24209,13 +25511,17 @@ class ODataLinkedService(LinkedService): :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_id: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). :type aad_resource_id: object :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
:type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -24252,6 +25558,7 @@ class ODataLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, @@ -24274,8 +25581,9 @@ def __init__( password: Optional["SecretBase"] = None, tenant: Optional[object] = None, service_principal_id: Optional[object] = None, + azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, - aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, + aad_service_principal_credential_type: Optional[Union[str, "ODataAADServicePrincipalCredentialType"]] = None, service_principal_key: Optional["SecretBase"] = None, service_principal_embedded_cert: Optional["SecretBase"] = None, service_principal_embedded_cert_password: Optional["SecretBase"] = None, @@ -24283,13 +25591,14 @@ def __init__( **kwargs ): super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OData' + self.type = 'OData' # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password self.tenant = tenant self.service_principal_id = service_principal_id + self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.aad_service_principal_credential_type = aad_service_principal_credential_type self.service_principal_key = service_principal_key @@ -24362,7 +25671,7 @@ def __init__( **kwargs ): super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ODataResource' + self.type = 'ODataResource' # type: str self.path = path @@ -24426,7 +25735,7 @@ def __init__( **kwargs ): super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'ODataSource' + self.type = 'ODataSource' # type: str self.query = query self.http_request_timeout = http_request_timeout self.additional_columns = additional_columns @@ -24508,7 +25817,7 @@ def __init__( **kwargs ): super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Odbc' + self.type = 'Odbc' # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -24575,7 +25884,7 @@ def __init__( **kwargs ): super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, 
write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'OdbcSink' + self.type = 'OdbcSink' # type: str self.pre_copy_script = pre_copy_script @@ -24636,7 +25945,7 @@ def __init__( **kwargs ): super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'OdbcSource' + self.type = 'OdbcSource' # type: str self.query = query @@ -24704,7 +26013,7 @@ def __init__( **kwargs ): super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OdbcTable' + self.type = 'OdbcTable' # type: str self.table_name = table_name @@ -24779,7 +26088,7 @@ def __init__( **kwargs ): super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Office365Table' + self.type = 'Office365Table' # type: str self.table_name = table_name self.predicate = predicate @@ -24857,7 +26166,7 @@ def __init__( **kwargs ): super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Office365' + self.type = 'Office365' # type: str self.office365_tenant_id = office365_tenant_id self.service_principal_tenant_id = service_principal_tenant_id self.service_principal_id = service_principal_id @@ -24939,7 +26248,7 @@ def __init__( **kwargs ): super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'Office365Source' + self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri self.date_filter_column = date_filter_column @@ -25289,7 +26598,7 @@ def __init__( **kwargs ): super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Oracle' + self.type = 'Oracle' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -25419,7 +26728,7 @@ def __init__( **kwargs ): super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OracleServiceCloud' + self.type = 'OracleServiceCloud' # type: str self.host = host self.username = username self.password = password @@ -25493,7 +26802,7 @@ def __init__( **kwargs ): super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, 
schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleServiceCloudObject' + self.type = 'OracleServiceCloudObject' # type: str self.table_name = table_name @@ -25555,7 +26864,7 @@ def __init__( **kwargs ): super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'OracleServiceCloudSource' + self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -25617,7 +26926,7 @@ def __init__( **kwargs ): super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'OracleSink' + self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -25688,7 +26997,7 @@ def __init__( **kwargs ): super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'OracleSource' + self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option @@ -25771,7 +27080,7 @@ def __init__( **kwargs ): super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleTable' + self.type = 'OracleTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -25806,7 +27115,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -25845,7 +27154,7 @@ def __init__( **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Orc' + self.type = 'Orc' # type: str self.location = location self.orc_compression_codec = orc_compression_codec @@ -25886,7 +27195,7 @@ def __init__( **kwargs ): super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type: str = 'OrcFormat' + self.type = 'OrcFormat' # type: str class OrcSink(CopySink): @@ -25916,6 +27225,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -25931,6 +27242,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -25943,11 +27255,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'OrcSink' + self.type = 'OrcSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class OrcSource(CopySource): @@ -26002,11 +27316,55 @@ def __init__( **kwargs ): super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'OrcSource' + self.type = 'OrcSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -26103,7 +27461,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. 
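The OrcSink.format_settings property and the new OrcWriteSettings model added above control how output files are emitted. A minimal sketch, assuming the data_factory_management_client.models import path from the docstrings:

from data_factory_management_client.models import OrcSink, OrcWriteSettings

# Cap each written ORC file at one million rows and prefix generated file names.
orc_sink = OrcSink(
    format_settings=OrcWriteSettings(
        max_rows_per_file=1000000,       # serialized as maxRowsPerFile
        file_name_prefix="orc_extract",  # used when copying from a non-file store without partitionOptions
    ),
)

ParquetWriteSettings, added further down in this diff, exposes the same maxRowsPerFile/fileNamePrefix pair for ParquetSink.format_settings.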
:type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -26142,7 +27500,7 @@ def __init__( **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Parquet' + self.type = 'Parquet' # type: str self.location = location self.compression_codec = compression_codec @@ -26183,7 +27541,7 @@ def __init__( **kwargs ): super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type: str = 'ParquetFormat' + self.type = 'ParquetFormat' # type: str class ParquetSink(CopySink): @@ -26213,6 +27571,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -26228,6 +27588,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -26240,11 +27601,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'ParquetSink' + self.type = 'ParquetSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class ParquetSource(CopySource): @@ -26299,11 +27662,55 @@ def __init__( **kwargs ): super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'ParquetSource' + self.type = 'ParquetSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. 
Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -26384,7 +27791,7 @@ def __init__( **kwargs ): super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Paypal' + self.type = 'Paypal' # type: str self.host = host self.client_id = client_id self.client_secret = client_secret @@ -26458,7 +27865,7 @@ def __init__( **kwargs ): super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PaypalObject' + self.type = 'PaypalObject' # type: str self.table_name = table_name @@ -26520,7 +27927,7 @@ def __init__( **kwargs ): super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'PaypalSource' + self.type = 'PaypalSource' # type: str self.query = query @@ -26633,7 +28040,7 @@ def __init__( **kwargs ): super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Phoenix' + self.type = 'Phoenix' # type: str self.host = host self.port = port self.http_path = http_path @@ -26723,7 +28130,7 @@ def __init__( **kwargs ): super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PhoenixObject' + self.type = 'PhoenixObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -26787,7 +28194,7 @@ def __init__( **kwargs ): super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, 
query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'PhoenixSource' + self.type = 'PhoenixSource' # type: str self.query = query @@ -26893,8 +28300,8 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_properties_folder_name: The name of the folder that this Pipeline is in. - :type name_properties_folder_name: str + :param name_folder_name: The name of the folder that this Pipeline is in. + :type name_folder_name: str """ _validation = { @@ -26918,7 +28325,7 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_properties_folder_name': {'key': 'properties.folder.name', 'type': 'str'}, + 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, } def __init__( @@ -26932,7 +28339,7 @@ def __init__( concurrency: Optional[int] = None, annotations: Optional[List[object]] = None, run_dimensions: Optional[Dict[str, object]] = None, - name_properties_folder_name: Optional[str] = None, + name_folder_name: Optional[str] = None, **kwargs ): super(PipelineResource, self).__init__(**kwargs) @@ -26944,7 +28351,7 @@ def __init__( self.concurrency = concurrency self.annotations = annotations self.run_dimensions = run_dimensions - self.name_properties_folder_name = name_properties_folder_name + self.name_folder_name = name_folder_name class PipelineRun(msrest.serialization.Model): @@ -27155,7 +28562,7 @@ def __init__( self.use_type_default = use_type_default -class PostgreSqlLinkedService(LinkedService): +class PostgreSQLLinkedService(LinkedService): """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -27213,14 +28620,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'PostgreSql' + super(PostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'PostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class PostgreSqlSource(TabularSource): +class PostgreSQLSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. 
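The PipelineResource change above renames name_properties_folder_name to name_folder_name and moves its serialization key from properties.folder.name to folder.name. A minimal sketch of constructing the model with the renamed keyword, assuming the same data_factory_management_client.models import path:

from data_factory_management_client.models import PipelineResource

pipeline = PipelineResource(
    description="Pipeline grouped under a UX folder",
    concurrency=1,
    name_folder_name="ExampleFolder",  # previously name_properties_folder_name; now serialized as folder.name
)

The PostgreSql* classes renamed to PostgreSQL* just below change only the Python class names; the serialized discriminators ('PostgreSql', 'PostgreSqlSource', 'PostgreSqlTable') are unchanged.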
@@ -27276,12 +28683,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'PostgreSqlSource' + super(PostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'PostgreSqlSource' # type: str self.query = query -class PostgreSqlTableDataset(Dataset): +class PostgreSQLTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. @@ -27354,8 +28761,8 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PostgreSqlTable' + super(PostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'PostgreSqlTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -27478,7 +28885,7 @@ def __init__( **kwargs ): super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Presto' + self.type = 'Presto' # type: str self.host = host self.server_version = server_version self.catalog = catalog @@ -27570,7 +28977,7 @@ def __init__( **kwargs ): super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PrestoObject' + self.type = 'PrestoObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -27634,7 +29041,7 @@ def __init__( **kwargs ): super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'PrestoSource' + self.type = 'PrestoSource' # type: str self.query = query @@ -27682,18 +29089,20 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com). 
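The QuickBooksLinkedService hunk that continues below drops the Required markers from the individual OAuth properties and adds a connectionProperties bag that is mutually exclusive with them. A minimal sketch of both shapes, assuming the data_factory_management_client.models import path; the property values and the keys inside connection_properties are placeholders, since the model treats it as an opaque object:

from data_factory_management_client.models import QuickBooksLinkedService, SecureString

# Individual properties (no longer required by the model itself).
qb_ls = QuickBooksLinkedService(
    endpoint="quickbooks.api.intuit.com",
    company_id="<company-id>",
    consumer_key="<consumer-key>",
    consumer_secret=SecureString(value="<consumer-secret>"),
    access_token=SecureString(value="<access-token>"),
    access_token_secret=SecureString(value="<access-token-secret>"),
)

# Or a single opaque connectionProperties object, mutually exclusive with the fields above.
qb_ls_bundled = QuickBooksLinkedService(
    connection_properties={"endpoint": "quickbooks.api.intuit.com"},  # hypothetical shape
)

SalesforceMarketingCloudLinkedService later in this diff gets the same optional connectionProperties treatment.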
+ :param connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object + :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to authorize. + :param company_id: The company ID of the QuickBooks company to authorize. :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. + :param consumer_key: The consumer key for OAuth 1.0 authentication. :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. + :param consumer_secret: The consumer secret for OAuth 1.0 authentication. :type consumer_secret: ~data_factory_management_client.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 authentication. + :param access_token: The access token for OAuth 1.0 authentication. :type access_token: ~data_factory_management_client.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. + :param access_token_secret: The access token secret for OAuth 1.0 authentication. :type access_token_secret: ~data_factory_management_client.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. @@ -27706,12 +29115,6 @@ class QuickBooksLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, } _attribute_map = { @@ -27721,6 +29124,7 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, @@ -27734,23 +29138,25 @@ class QuickBooksLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - company_id: object, - consumer_key: object, - consumer_secret: "SecretBase", - access_token: "SecretBase", - access_token_secret: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, + endpoint: Optional[object] = None, + company_id: Optional[object] = None, + consumer_key: Optional[object] = None, + consumer_secret: Optional["SecretBase"] = None, + access_token: Optional["SecretBase"] = None, + access_token_secret: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'QuickBooks' + self.type = 'QuickBooks' # type: str + self.connection_properties = connection_properties self.endpoint = endpoint self.company_id = company_id self.consumer_key = consumer_key @@ -27825,7 +29231,7 @@ def __init__( **kwargs ): super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'QuickBooksObject' + self.type = 'QuickBooksObject' # type: str self.table_name = table_name @@ -27887,7 +29293,7 @@ def __init__( **kwargs ): super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'QuickBooksSource' + self.type = 'QuickBooksSource' # type: str self.query = query @@ -28102,7 +29508,7 @@ def __init__( **kwargs ): super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'RelationalSource' + self.type = 'RelationalSource' # type: str self.query = query self.additional_columns = additional_columns @@ -28172,7 +29578,7 @@ def __init__( **kwargs ): super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RelationalTable' + self.type = 'RelationalTable' # type: str self.table_name = table_name @@ -28242,7 +29648,7 @@ def __init__( **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'RerunTumblingWindowTrigger' + self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time @@ -28332,7 +29738,7 @@ def __init__( **kwargs ): super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Responsys' + self.type = 'Responsys' # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -28406,7 +29812,7 @@ def __init__( **kwargs ): super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ResponsysObject' + self.type = 'ResponsysObject' # type: str self.table_name = table_name @@ -28468,7 +29874,7 @@ def __init__( **kwargs ): super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, 
additional_columns=additional_columns, **kwargs) - self.type: str = 'ResponsysSource' + self.type = 'ResponsysSource' # type: str self.query = query @@ -28557,7 +29963,7 @@ def __init__( **kwargs ): super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RestResource' + self.type = 'RestResource' # type: str self.relative_url = relative_url self.request_method = request_method self.request_body = request_body @@ -28607,6 +30013,10 @@ class RestServiceLinkedService(LinkedService): :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: The resource you are requesting authorization to use. :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -28636,6 +30046,7 @@ class RestServiceLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -28656,12 +30067,13 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'RestService' + self.type = 'RestService' # type: str self.url = url self.enable_server_certificate_validation = enable_server_certificate_validation self.authentication_type = authentication_type @@ -28670,10 +30082,98 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential +class RestSink(CopySink): + """A copy activity Rest service Sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
+ :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: + string (or Expression with resultType string). + :type request_method: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next request, in milliseconds. + :type request_interval: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + additional_headers: Optional[object] = None, + http_request_timeout: Optional[object] = None, + request_interval: Optional[object] = None, + http_compression_type: Optional[object] = None, + **kwargs + ): + super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, 
max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RestSink' # type: str + self.request_method = request_method + self.additional_headers = additional_headers + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.http_compression_type = http_compression_type + + class RestSource(CopySource): """A copy activity Rest service source. @@ -28753,7 +30253,7 @@ def __init__( **kwargs ): super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'RestSource' + self.type = 'RestSource' # type: str self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers @@ -29003,7 +30503,7 @@ def __init__( **kwargs ): super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Salesforce' + self.type = 'Salesforce' # type: str self.environment_url = environment_url self.username = username self.password = password @@ -29030,8 +30530,11 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). + :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object + :param client_id: The client ID associated with the Salesforce Marketing Cloud application. + Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). 
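RestSink, added a little above, is a new copy sink for writing to REST endpoints. A minimal sketch, assuming the data_factory_management_client.models import path; the values are placeholders drawn from the defaults described in the docstring:

from data_factory_management_client.models import RestSink

rest_sink = RestSink(
    request_method="POST",            # default HTTP method per the docstring
    http_request_timeout="00:01:40",  # TimeSpan-style timeout for getting a response
    request_interval=10,              # milliseconds to wait before sending the next request
    http_compression_type="gzip",     # gzip is the only supported compression; default is none
)

As with the other CopySink subtypes in this file, the constructor pins the discriminator to 'RestSink'.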
@@ -29055,7 +30558,6 @@ class SalesforceMarketingCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -29065,6 +30567,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -29076,12 +30579,13 @@ class SalesforceMarketingCloudLinkedService(LinkedService): def __init__( self, *, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, + client_id: Optional[object] = None, client_secret: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -29090,7 +30594,8 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceMarketingCloud' + self.type = 'SalesforceMarketingCloud' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints @@ -29163,7 +30668,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceMarketingCloudObject' + self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = table_name @@ -29225,7 +30730,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SalesforceMarketingCloudSource' + self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -29294,7 +30799,7 @@ def __init__( **kwargs ): super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceObject' + self.type = 'SalesforceObject' # type: str self.object_api_name = object_api_name @@ -29378,7 +30883,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceServiceCloud' + self.type = 'SalesforceServiceCloud' # type: str self.environment_url = environment_url self.username = username self.password = password @@ -29453,7 +30958,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceServiceCloudObject' + self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = object_api_name @@ -29529,7 +31034,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SalesforceServiceCloudSink' + self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -29592,7 +31097,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SalesforceServiceCloudSource' + self.type = 'SalesforceServiceCloudSource' # type: str self.query = query self.read_behavior = read_behavior self.additional_columns = additional_columns @@ -29670,7 +31175,7 @@ def __init__( **kwargs ): super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SalesforceSink' + self.type = 'SalesforceSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -29738,7 +31243,7 @@ def __init__( **kwargs ): super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SalesforceSource' + self.type = 'SalesforceSource' # type: str self.query = query self.read_behavior = read_behavior @@ -29803,7 +31308,7 @@ def __init__( **kwargs ): super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapBwCube' + self.type = 'SapBwCube' # type: str class SapBwLinkedService(LinkedService): @@ -29883,7 +31388,7 @@ def __init__( **kwargs ): super(SapBwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapBW' + self.type = 'SapBW' # type: 
str self.server = server self.system_number = system_number self.client_id = client_id @@ -29949,7 +31454,7 @@ def __init__( **kwargs ): super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SapBwSource' + self.type = 'SapBwSource' # type: str self.query = query @@ -30019,7 +31524,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapCloudForCustomer' + self.type = 'SapCloudForCustomer' # type: str self.url = url self.username = username self.password = password @@ -30092,7 +31597,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapCloudForCustomerResource' + self.type = 'SapCloudForCustomerResource' # type: str self.path = path @@ -30162,7 +31667,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SapCloudForCustomerSink' + self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -30232,7 +31737,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SapCloudForCustomerSource' + self.type = 'SapCloudForCustomerSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -30303,7 +31808,7 @@ def __init__( **kwargs ): super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapEcc' + self.type = 'SapEcc' # type: str self.url = url self.username = username self.password = password @@ -30376,7 +31881,7 @@ def __init__( **kwargs ): super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapEccResource' + self.type = 'SapEccResource' # type: str self.path = path @@ -30445,7 +31950,7 @@ def __init__( **kwargs ): super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) 
- self.type: str = 'SapEccSource' + self.type = 'SapEccSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -30525,7 +32030,7 @@ def __init__( **kwargs ): super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapHana' + self.type = 'SapHana' # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -30628,7 +32133,7 @@ def __init__( **kwargs ): super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SapHanaSource' + self.type = 'SapHanaSource' # type: str self.query = query self.packet_size = packet_size self.partition_option = partition_option @@ -30704,7 +32209,7 @@ def __init__( **kwargs ): super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapHanaTable' + self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -30727,26 +32232,38 @@ class SapOpenHubLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance where the open hub destination is - located. Type: string (or Expression with resultType string). + :param server: Host name of the SAP BW instance where the open hub destination is located. + Type: string (or Expression with resultType string). :type server: object - :param system_number: Required. System number of the BW system where the open hub destination - is located. (Usually a two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). + :param system_number: System number of the BW system where the open hub destination is located. + (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with + resultType string). :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where the open hub - destination is located. (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). + :param client_id: Client ID of the client on the BW system where the open hub destination is + located. (Usually a three-digit decimal number represented as a string) Type: string (or + Expression with resultType string). :type client_id: object :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object :param user_name: Username to access the SAP BW server where the open hub destination is located. 
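The SapOpenHubLinkedService hunk that starts here makes server, systemNumber and clientId optional and adds message-server routing (messageServer, messageServerService, logonGroup) plus systemId. A minimal sketch of a logon-group connection, assuming the data_factory_management_client.models import path and placeholder values:

from data_factory_management_client.models import SapOpenHubLinkedService, SecureString

sap_open_hub_ls = SapOpenHubLinkedService(
    message_server="sapms.contoso.local",  # new: SAP Message Server host
    message_server_service="3600",         # new: message server service name or port
    logon_group="PUBLIC",                  # new: logon group for the SAP system
    system_id="BWP",                       # new: SystemID of the SAP system
    client_id="100",
    language="EN",
    user_name="bw_reader",
    password=SecureString(value="<placeholder-password>"),
)

SapOpenHubSource and SapTableSource below also pick up sapDataColumnDelimiter (and, for the open hub source, customRfcReadTableFunctionModule).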
Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~data_factory_management_client.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -30755,9 +32272,6 @@ class SapOpenHubLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -30771,36 +32285,48 @@ class SapOpenHubLinkedService(LinkedService): 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - server: object, - system_number: object, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + server: Optional[object] = None, + system_number: Optional[object] = None, + client_id: Optional[object] = None, language: Optional[object] = None, + system_id: Optional[object] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, + message_server: Optional[object] = None, + message_server_service: Optional[object] = None, + logon_group: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapOpenHub' + self.type = 'SapOpenHub' # type: str self.server = server self.system_number = system_number self.client_id = client_id self.language = language + self.system_id = system_id self.user_name = user_name self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.logon_group = logon_group self.encrypted_credential = encrypted_credential @@ -30836,6 +32362,13 @@ class SapOpenHubSource(TabularSource): requestId larger than the value of this property will be 
retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). :type base_request_id: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :type sap_data_column_delimiter: object """ _validation = { @@ -30852,6 +32385,8 @@ class SapOpenHubSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, } def __init__( @@ -30865,12 +32400,16 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, exclude_last_request: Optional[object] = None, base_request_id: Optional[object] = None, + custom_rfc_read_table_function_module: Optional[object] = None, + sap_data_column_delimiter: Optional[object] = None, **kwargs ): super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SapOpenHubSource' + self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.sap_data_column_delimiter = sap_data_column_delimiter class SapOpenHubTableDataset(Dataset): @@ -30950,7 +32489,7 @@ def __init__( **kwargs ): super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapOpenHubTable' + self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = open_hub_destination_name self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -31082,7 +32621,7 @@ def __init__( **kwargs ): super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapTable' + self.type = 'SapTable' # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -31209,7 +32748,7 @@ def __init__( **kwargs ): super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapTableResource' + self.type = 'SapTableResource' # type: str self.table_name = table_name @@ -31256,6 +32795,10 @@ class SapTableSource(TabularSource): :param 
custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :type sap_data_column_delimiter: object :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". @@ -31283,6 +32826,7 @@ class SapTableSource(TabularSource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -31302,18 +32846,20 @@ def __init__( rfc_table_options: Optional[object] = None, batch_size: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, + sap_data_column_delimiter: Optional[object] = None, partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SapTableSource' + self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips self.rfc_table_fields = rfc_table_fields self.rfc_table_options = rfc_table_options self.batch_size = batch_size self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.sap_data_column_delimiter = sap_data_column_delimiter self.partition_option = partition_option self.partition_settings = partition_settings @@ -31370,7 +32916,7 @@ def __init__( **kwargs ): super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type: str = 'ScheduleTrigger' + self.type = 'ScheduleTrigger' # type: str self.recurrence = recurrence @@ -31500,7 +33046,7 @@ def __init__( **kwargs ): super(SecureString, self).__init__(**kwargs) - self.type: str = 'SecureString' + self.type = 'SecureString' # type: str self.value = value @@ -31539,7 +33085,7 @@ def __init__( **kwargs ): super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.type: str = 'SelfDependencyTumblingWindowTriggerReference' + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str self.offset = offset self.size = size @@ -31581,7 +33127,7 @@ def __init__( **kwargs ): super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.type: str = 'SelfHosted' + self.type = 'SelfHosted' # type: str self.linked_info = linked_info @@ -31824,7 +33370,7 @@ def 
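# Editor's illustration (hedged sketch): SapTableSource (and SapOpenHubSource above) now
# accepts sap_data_column_delimiter alongside the existing custom RFC read-table function
# module option. All values below are placeholders for illustration only.
from data_factory_management_client.models import SapTableSource

sap_table_source = SapTableSource(
    row_count=1000,
    rfc_table_options="LANGU EQ 'E'",                              # hypothetical RFC filter
    custom_rfc_read_table_function_module="/CUSTOM/RFC_READ_TABLE",  # hypothetical module name
    sap_data_column_delimiter="|",                                 # single-character delimiter
)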
__init__( **kwargs ): super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'SelfHosted' + self.type = 'SelfHosted' # type: str self.create_time = None self.task_queue_id = None self.internal_channel_encryption = None @@ -31940,7 +33486,7 @@ def __init__( **kwargs ): super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'ServiceNow' + self.type = 'ServiceNow' # type: str self.endpoint = endpoint self.authentication_type = authentication_type self.username = username @@ -32017,7 +33563,7 @@ def __init__( **kwargs ): super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ServiceNowObject' + self.type = 'ServiceNowObject' # type: str self.table_name = table_name @@ -32079,7 +33625,7 @@ def __init__( **kwargs ): super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'ServiceNowSource' + self.type = 'ServiceNowSource' # type: str self.query = query @@ -32136,7 +33682,7 @@ def __init__( **kwargs ): super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'SetVariable' + self.type = 'SetVariable' # type: str self.variable_name = variable_name self.value = value @@ -32179,7 +33725,7 @@ def __init__( **kwargs ): super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type: str = 'SftpLocation' + self.type = 'SftpLocation' # type: str class SftpReadSettings(StoreReadSettings): @@ -32260,7 +33806,7 @@ def __init__( **kwargs ): super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SftpReadSettings' + self.type = 'SftpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -32376,7 +33922,7 @@ def __init__( **kwargs ): super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sftp' + self.type = 'Sftp' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -32438,7 +33984,7 @@ def __init__( **kwargs ): super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type: str = 'SftpWriteSettings' + self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename @@ -32520,7 +34066,7 @@ def __init__( **kwargs ): super(SharePointOnlineListLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SharePointOnlineList' + self.type = 'SharePointOnlineList' # type: str self.site_url = site_url self.tenant_id = tenant_id self.service_principal_id = service_principal_id @@ -32593,7 +34139,7 @@ def __init__( **kwargs ): super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SharePointOnlineListResource' + self.type = 'SharePointOnlineListResource' # type: str self.list_name = list_name @@ -32651,7 +34197,7 @@ def __init__( **kwargs ): super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SharePointOnlineListSource' + self.type = 'SharePointOnlineListSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -32732,7 +34278,7 @@ def __init__( **kwargs ): super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Shopify' + self.type = 'Shopify' # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -32805,7 +34351,7 @@ def __init__( **kwargs ): super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ShopifyObject' + self.type = 'ShopifyObject' # type: str self.table_name = table_name @@ -32867,7 +34413,7 @@ def __init__( **kwargs ): super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'ShopifySource' + self.type = 'ShopifySource' # type: str self.query = query @@ -32969,7 +34515,7 @@ def __init__( **kwargs ): super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SnowflakeTable' + self.type = 'SnowflakeTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -33016,7 +34562,7 @@ def __init__( **kwargs ): super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'SnowflakeExportCopyCommand' + self.type = 'SnowflakeExportCopyCommand' # type: str self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options @@ -33063,7 +34609,7 @@ def __init__( **kwargs ): super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) - 
self.type: str = 'SnowflakeImportCopyCommand' + self.type = 'SnowflakeImportCopyCommand' # type: str self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options @@ -33128,7 +34674,7 @@ def __init__( **kwargs ): super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Snowflake' + self.type = 'Snowflake' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -33196,7 +34742,7 @@ def __init__( **kwargs ): super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SnowflakeSink' + self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -33252,7 +34798,7 @@ def __init__( **kwargs ): super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SnowflakeSource' + self.type = 'SnowflakeSource' # type: str self.query = query self.export_settings = export_settings @@ -33376,7 +34922,7 @@ def __init__( **kwargs ): super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Spark' + self.type = 'Spark' # type: str self.host = host self.port = port self.server_type = server_type @@ -33467,7 +35013,7 @@ def __init__( **kwargs ): super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SparkObject' + self.type = 'SparkObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -33531,11 +35077,11 @@ def __init__( **kwargs ): super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SparkSource' + self.type = 'SparkSource' # type: str self.query = query -class SqlDWSink(CopySink): +class SQLDWSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. 
@@ -33616,8 +35162,8 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SqlDWSink' + super(SQLDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings @@ -33626,7 +35172,7 @@ def __init__( self.table_option = table_option -class SqlDWSource(TabularSource): +class SQLDWSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -33664,9 +35210,9 @@ class SqlDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -33685,7 +35231,7 @@ class SqlDWSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( @@ -33700,12 +35246,12 @@ def __init__( sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, - partition_option: Optional[Union[str, "SqlPartitionOption"]] = None, - partition_settings: Optional["SqlPartitionSettings"] = None, + partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, + partition_settings: Optional["SQLPartitionSettings"] = None, **kwargs ): - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SqlDWSource' + super(SQLDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = 
sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -33713,7 +35259,7 @@ def __init__( self.partition_settings = partition_settings -class SqlMiSink(CopySink): +class SQLMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. @@ -33795,8 +35341,8 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SqlMISink' + super(SQLMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -33805,7 +35351,7 @@ def __init__( self.table_option = table_option -class SqlMiSource(TabularSource): +class SQLMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -33844,9 +35390,9 @@ class SqlMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -33866,7 +35412,7 @@ class SqlMiSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( @@ -33882,12 +35428,12 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SqlPartitionOption"]] = None, - partition_settings: Optional["SqlPartitionSettings"] = None, + partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, + partition_settings: Optional["SQLPartitionSettings"] = None, **kwargs ): - super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SqlMISource' + super(SQLMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -33896,7 +35442,7 @@ def __init__( self.partition_settings = partition_settings -class SqlPartitionSettings(msrest.serialization.Model): +class SQLPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -33929,13 +35475,13 @@ def __init__( partition_lower_bound: Optional[object] = None, **kwargs ): - super(SqlPartitionSettings, self).__init__(**kwargs) + super(SQLPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class SqlServerLinkedService(LinkedService): +class SQLServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. 
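# Editor's illustration (hedged sketch): the Sql* model classes are renamed to SQL*
# (SQLDWSource, SQLPartitionSettings, ...) while the serialized discriminator strings keep
# their original casing ('SqlDWSource', 'SqlServer', ...), so wire payloads are unchanged and
# only Python import/usage sites need the new names. Query and bounds are placeholders.
from data_factory_management_client.models import SQLDWSource, SQLPartitionSettings

dw_source = SQLDWSource(
    sql_reader_query="SELECT * FROM dbo.FactSales",   # placeholder query
    partition_option="DynamicRange",
    partition_settings=SQLPartitionSettings(
        partition_column_name="SaleId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)
assert dw_source.type == 'SqlDWSource'   # discriminator value is unaffected by the rename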
@@ -33999,15 +35545,15 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SqlServer' + super(SQLServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SqlServer' # type: str self.connection_string = connection_string self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential -class SqlServerSink(CopySink): +class SQLServerSink(CopySink): """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. @@ -34089,8 +35635,8 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SqlServerSink' + super(SQLServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -34099,7 +35645,7 @@ def __init__( self.table_option = table_option -class SqlServerSource(TabularSource): +class SQLServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -34138,9 +35684,9 @@ class SqlServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -34160,7 +35706,7 @@ class SqlServerSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( @@ -34176,12 +35722,12 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SqlPartitionOption"]] = None, - partition_settings: Optional["SqlPartitionSettings"] = None, + partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, + partition_settings: Optional["SQLPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SqlServerSource' + super(SQLServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -34190,7 +35736,7 @@ def __init__( self.partition_settings = partition_settings -class SqlServerStoredProcedureActivity(ExecutionActivity): +class SQLServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -34254,13 +35800,13 @@ def __init__( stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'SqlServerStoredProcedure' + super(SQLServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters -class SqlServerTableDataset(Dataset): +class SQLServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. 
@@ -34334,14 +35880,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SqlServerTable' + super(SQLServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SqlServerTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class SqlSink(CopySink): +class SQLSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. @@ -34423,8 +35969,8 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'SqlSink' + super(SQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -34433,7 +35979,7 @@ def __init__( self.table_option = table_option -class SqlSource(TabularSource): +class SQLSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -34474,9 +36020,9 @@ class SqlSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SqlPartitionOption + :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings """ _validation = { @@ -34496,7 +36042,7 @@ class SqlSource(TabularSource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, } def __init__( @@ -34512,12 +36058,12 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, isolation_level: Optional[object] = None, - partition_option: Optional[Union[str, "SqlPartitionOption"]] = None, - partition_settings: Optional["SqlPartitionSettings"] = None, + partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, + partition_settings: Optional["SQLPartitionSettings"] = None, **kwargs ): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SqlSource' + super(SQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -34544,14 +36090,17 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The URL of the Square instance. (i.e. mystore.mysquare.com). + :param connection_properties: Properties used to connect to Square. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object + :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). :type host: object - :param client_id: Required. The client ID associated with your Square application. + :param client_id: The client ID associated with your Square application. :type client_id: object :param client_secret: The client secret associated with your Square application. :type client_secret: ~data_factory_management_client.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square application dashboard. - (i.e. http://localhost:2500). + :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + http://localhost:2500). :type redirect_uri: object :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
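# Editor's illustration (hedged sketch): host, client_id and redirect_uri are no longer
# required on SquareLinkedService, and a connection_properties bag (mutually exclusive with
# the individual properties, per the docstring) is now accepted. The dict shape below is a
# guess for illustration only, reusing the example values from the docstring.
from data_factory_management_client.models import SquareLinkedService

square_ls = SquareLinkedService(
    connection_properties={
        "host": "mystore.mysquare.com",
        "clientId": "<placeholder-client-id>",
        "redirectUri": "http://localhost:2500",
    },
)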
@@ -34571,9 +36120,6 @@ class SquareLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, } _attribute_map = { @@ -34583,6 +36129,7 @@ class SquareLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, @@ -34596,15 +36143,16 @@ class SquareLinkedService(LinkedService): def __init__( self, *, - host: object, - client_id: object, - redirect_uri: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, + host: Optional[object] = None, + client_id: Optional[object] = None, client_secret: Optional["SecretBase"] = None, + redirect_uri: Optional[object] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, use_peer_verification: Optional[object] = None, @@ -34612,7 +36160,8 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Square' + self.type = 'Square' # type: str + self.connection_properties = connection_properties self.host = host self.client_id = client_id self.client_secret = client_secret @@ -34687,7 +36236,7 @@ def __init__( **kwargs ): super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SquareObject' + self.type = 'SquareObject' # type: str self.table_name = table_name @@ -34749,7 +36298,7 @@ def __init__( **kwargs ): super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SquareSource' + self.type = 'SquareSource' # type: str self.query = query @@ -34880,7 +36429,7 @@ def __init__( **kwargs ): super(SsisObjectMetadata, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] self.id = id self.name = name self.description = description @@ -34930,7 +36479,7 @@ def __init__( **kwargs ): super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) - self.type: str = 'Environment' + self.type = 'Environment' # type: str self.folder_id = folder_id self.variables = variables @@ -35074,21 +36623,19 @@ def __init__( **kwargs ): super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) - self.type: str = 'Folder' + self.type = 'Folder' # type: str class 
SsisLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~data_factory_management_client.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~data_factory_management_client.models.SsisAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 @@ -35099,7 +36646,7 @@ class SsisLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -35109,18 +36656,18 @@ class SsisLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, *, log_path: object, + type: Union[str, "SsisLogLocationType"], access_credential: Optional["SsisAccessCredential"] = None, log_refresh_interval: Optional[object] = None, **kwargs ): super(SsisLogLocation, self).__init__(**kwargs) self.log_path = log_path + self.type = type self.access_credential = access_credential self.log_refresh_interval = log_refresh_interval @@ -35239,7 +36786,7 @@ def __init__( **kwargs ): super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) - self.type: str = 'Package' + self.type = 'Package' # type: str self.folder_id = folder_id self.project_version = project_version self.project_id = project_id @@ -35445,7 +36992,7 @@ def __init__( **kwargs ): super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) - self.type: str = 'Project' + self.type = 'Project' # type: str self.folder_id = folder_id self.version = version self.environment_refs = environment_refs @@ -35672,7 +37219,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Switch' + self.type = 'Switch' # type: str self.on = on self.cases = cases self.default_activities = default_activities @@ -35785,7 +37332,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sybase' + self.type = 'Sybase' # type: str self.server = server self.database = database self.schema = schema @@ -35852,7 +37399,7 @@ def __init__( **kwargs ): super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'SybaseSource' + self.type = 'SybaseSource' # type: str self.query = query @@ -35920,7 +37467,7 @@ def __init__( **kwargs ): super(SybaseTableDataset, 
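# Editor's illustration (hedged sketch): SsisLogLocation.type is no longer a class-level
# constant ("File"); it is now a required constructor argument typed as
# Union[str, SsisLogLocationType], so callers pass the value explicitly. Path is a placeholder.
from data_factory_management_client.models import SsisLogLocation

log_location = SsisLogLocation(
    log_path="\\\\fileshare\\ssis\\logs",   # hypothetical UNC path
    type="File",                            # previously implied by the constant; now supplied
)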
self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SybaseTable' + self.type = 'SybaseTable' # type: str self.table_name = table_name @@ -35992,7 +37539,7 @@ def __init__( **kwargs ): super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'TabularTranslator' + self.type = 'TabularTranslator' # type: str self.column_mappings = column_mappings self.schema_mapping = schema_mapping self.collection_reference = collection_reference @@ -36002,6 +37549,80 @@ def __init__( self.type_conversion_settings = type_conversion_settings +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -36076,7 +37697,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -36188,7 +37809,7 @@ def __init__( **kwargs ): super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'TeradataSource' + self.type = 'TeradataSource' # type: str self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -36263,7 +37884,7 @@ def __init__( **kwargs ): super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'TeradataTable' + self.type = 'TeradataTable' # type: str self.database = database self.table = table @@ -36349,7 +37970,7 @@ def __init__( **kwargs ): super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type: str = 'TextFormat' + self.type = 'TextFormat' # type: str self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter self.escape_char = escape_char @@ -36396,7 +38017,7 @@ def __init__( **kwargs ): super(TriggerDependencyReference, self).__init__(**kwargs) - self.type: str = 'TriggerDependencyReference' + self.type = 'TriggerDependencyReference' # type: str self.reference_trigger = reference_trigger @@ -36834,7 +38455,7 @@ def __init__( **kwargs ): super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'TumblingWindowTrigger' + self.type = 'TumblingWindowTrigger' # type: str self.pipeline = pipeline self.frequency = frequency self.interval = interval @@ -36886,7 +38507,7 @@ def __init__( **kwargs ): super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) - self.type: str = 'TumblingWindowTriggerDependencyReference' + self.type = 'TumblingWindowTriggerDependencyReference' # type: str self.offset = offset self.size = 
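# Editor's illustration (hedged sketch): TarReadSettings and TarGZipReadSettings are new
# CompressionReadSettings subclasses; the only option either exposes here is whether the
# compression file name is preserved as a folder path when reading.
from data_factory_management_client.models import TarGZipReadSettings, TarReadSettings

targz_settings = TarGZipReadSettings(preserve_compression_file_name_as_folder=False)
tar_settings = TarReadSettings(preserve_compression_file_name_as_folder=True)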
size @@ -37007,7 +38628,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Until' + self.type = 'Until' # type: str self.expression = expression self.timeout = timeout self.activities = activities @@ -37220,7 +38841,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Validation' + self.type = 'Validation' # type: str self.timeout = timeout self.sleep = sleep self.minimum_size = minimum_size @@ -37319,7 +38940,7 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -37383,7 +39004,7 @@ def __init__( **kwargs ): super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'VerticaSource' + self.type = 'VerticaSource' # type: str self.query = query @@ -37462,7 +39083,7 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -37518,7 +39139,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = wait_time_in_seconds @@ -37614,7 +39235,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = method self.url = url self.headers = headers @@ -37712,7 +39333,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = url - self.authentication_type: Optional[str] = None + self.authentication_type = None # type: Optional[str] class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -37746,7 +39367,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -37791,7 +39412,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(url=url, 
**kwargs) - self.authentication_type: str = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = username self.password = password @@ -37837,7 +39458,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = pfx self.password = password @@ -37845,8 +39466,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -37862,8 +39481,8 @@ class WebHookActivity(Activity): :type depends_on: list[~data_factory_management_client.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~data_factory_management_client.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~data_factory_management_client.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -37880,17 +39499,17 @@ class WebHookActivity(Activity): :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~data_factory_management_client.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, - statusCode, output and error in callback request body will be - consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in - callback request. Default is false. Type: boolean (or Expression with resultType boolean). + :param report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). 
:type report_status_on_call_back: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, + 'method': {'required': True}, 'url': {'required': True}, } @@ -37910,12 +39529,11 @@ class WebHookActivity(Activity): 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } - method = "POST" - def __init__( self, *, name: str, + method: Union[str, "WebHookActivityMethod"], url: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, @@ -37929,7 +39547,8 @@ def __init__( **kwargs ): super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'WebHook' + self.type = 'WebHook' # type: str + self.method = method self.url = url self.timeout = timeout self.headers = headers @@ -37987,7 +39606,7 @@ def __init__( **kwargs ): super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Web' + self.type = 'Web' # type: str self.type_properties = type_properties @@ -38039,7 +39658,7 @@ def __init__( **kwargs ): super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'WebSource' + self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -38114,7 +39733,7 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'WebTable' + self.type = 'WebTable' # type: str self.index = index self.path = path @@ -38137,7 +39756,10 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The endpoint of the Xero server. (i.e. api.xero.com). + :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object + :param host: The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. 
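# A caller-side sketch of the WebHookActivity change above: `method` is now an ordinary
# required constructor argument (str or WebHookActivityMethod) rather than a class-level
# constant. The import path and the name/url values are assumptions for illustration only.
from data_factory_management_client.models import WebHookActivity  # assumed package path

hook = WebHookActivity(
    name="NotifyCallback",                    # hypothetical activity name
    method="POST",                            # the only documented method value
    url="https://example.com/adf/callback",   # hypothetical callback endpoint
    report_status_on_call_back=True,          # let the callback body drive success/failure
)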
:type consumer_key: ~data_factory_management_client.models.SecretBase @@ -38163,7 +39785,6 @@ class XeroLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { @@ -38173,6 +39794,7 @@ class XeroLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, @@ -38185,12 +39807,13 @@ class XeroLinkedService(LinkedService): def __init__( self, *, - host: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, + host: Optional[object] = None, consumer_key: Optional["SecretBase"] = None, private_key: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, @@ -38200,7 +39823,8 @@ def __init__( **kwargs ): super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Xero' + self.type = 'Xero' # type: str + self.connection_properties = connection_properties self.host = host self.consumer_key = consumer_key self.private_key = private_key @@ -38274,7 +39898,7 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = table_name @@ -38336,7 +39960,7 @@ def __init__( **kwargs ): super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'XeroSource' + self.type = 'XeroSource' # type: str self.query = query @@ -38420,7 +40044,7 @@ def __init__( **kwargs ): super(XmlDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Xml' + self.type = 'Xml' # type: str self.location = location self.encoding_name = encoding_name self.null_value = null_value @@ -38442,6 +40066,12 @@ class XmlReadSettings(FormatReadSettings): :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). :type validation_mode: object + :param detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). 
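# Sketch of the relaxed XeroLinkedService contract above: `host` is no longer required and a
# single `connection_properties` object may carry the connection settings instead (mutually
# exclusive with the individual typed properties). Dict contents and import path are assumed.
from data_factory_management_client.models import XeroLinkedService  # assumed package path

xero_ls = XeroLinkedService(
    connection_properties={
        "host": "api.xero.com",          # illustrative only
        "useEncryptedEndpoints": True,
    },
)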
+ :type detect_data_type: object + :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). + :type namespaces: object :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: @@ -38458,6 +40088,8 @@ class XmlReadSettings(FormatReadSettings): 'type': {'key': 'type', 'type': 'str'}, 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, 'validation_mode': {'key': 'validationMode', 'type': 'object'}, + 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, + 'namespaces': {'key': 'namespaces', 'type': 'object'}, 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, } @@ -38467,13 +40099,17 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, compression_properties: Optional["CompressionReadSettings"] = None, validation_mode: Optional[object] = None, + detect_data_type: Optional[object] = None, + namespaces: Optional[object] = None, namespace_prefixes: Optional[object] = None, **kwargs ): super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'XmlReadSettings' + self.type = 'XmlReadSettings' # type: str self.compression_properties = compression_properties self.validation_mode = validation_mode + self.detect_data_type = detect_data_type + self.namespaces = namespaces self.namespace_prefixes = namespace_prefixes @@ -38533,7 +40169,7 @@ def __init__( **kwargs ): super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type: str = 'XmlSource' + self.type = 'XmlSource' # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns @@ -38572,7 +40208,7 @@ def __init__( **kwargs ): super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'ZipDeflateReadSettings' + self.type = 'ZipDeflateReadSettings' # type: str self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder @@ -38594,7 +40230,10 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object + :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). :type endpoint: object :param access_token: The access token for Zoho authentication. 
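# The two knobs added to XmlReadSettings above are plain `object` fields, so they accept either
# literal booleans or ADF expressions. Values and import path below are illustrative assumptions.
from data_factory_management_client.models import XmlReadSettings  # assumed package path

xml_settings = XmlReadSettings(
    validation_mode="xsd",                                    # 'none', 'xsd' or 'dtd'
    detect_data_type=True,                                    # new: infer column types
    namespaces=True,                                          # new: enable namespace handling
    namespace_prefixes={"http://www.example.com/xml": "ns"},  # hypothetical uri->prefix map
)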
:type access_token: ~data_factory_management_client.models.SecretBase @@ -38616,7 +40255,6 @@ class ZohoLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, } _attribute_map = { @@ -38626,6 +40264,7 @@ class ZohoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -38637,12 +40276,13 @@ class ZohoLinkedService(LinkedService): def __init__( self, *, - endpoint: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, + endpoint: Optional[object] = None, access_token: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -38651,7 +40291,8 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Zoho' + self.type = 'Zoho' # type: str + self.connection_properties = connection_properties self.endpoint = endpoint self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -38724,7 +40365,7 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = table_name @@ -38786,5 +40427,5 @@ def __init__( **kwargs ): super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type: str = 'ZohoSource' + self.type = 'ZohoSource' # type: str self.query = query diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py index 2073b4d42fb..3f6a32ff284 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py @@ -21,6 +21,8 @@ from ._trigger_run_operations import TriggerRunOperations from ._data_flow_operations import DataFlowOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._managed_virtual_network_operations import ManagedVirtualNetworkOperations +from ._managed_private_endpoint_operations import ManagedPrivateEndpointOperations __all__ = [ 'OperationOperations', @@ -38,4 +40,6 @@ 
'TriggerRunOperations', 'DataFlowOperations', 'DataFlowDebugSessionOperations', + 'ManagedVirtualNetworkOperations', + 'ManagedPrivateEndpointOperations', ] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py index b96de3365ed..192e09232ad 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py @@ -52,7 +52,7 @@ def query_by_pipeline_run( run_id, # type: str last_updated_after, # type: datetime.datetime last_updated_before, # type: datetime.datetime - continuation_token=None, # type: Optional[str] + continuation_token_parameter=None, # type: Optional[str] filters=None, # type: Optional[List["models.RunQueryFilter"]] order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] **kwargs # type: Any @@ -72,9 +72,9 @@ def query_by_pipeline_run( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. @@ -88,7 +88,7 @@ def query_by_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -111,9 +111,8 @@ def query_by_pipeline_run( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 77b18a7b485..446c117302f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -58,12 +58,12 @@ def _create_initial( properties=None, # type: Optional["models.IntegrationRuntime"] **kwargs # type: Any ): 
- # type: (...) -> "models.CreateDataFlowDebugSessionResponse" - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] + # type: (...) -> Optional["models.CreateDataFlowDebugSessionResponse"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) + request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -85,9 +85,8 @@ def _create_initial( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'CreateDataFlowDebugSessionRequest') + body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -123,7 +122,7 @@ def begin_create( properties=None, # type: Optional["models.IntegrationRuntime"] **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.CreateDataFlowDebugSessionResponse"] """Creates a data flow debug session. :param resource_group_name: The resource group name. @@ -131,10 +130,10 @@ def begin_create( :param factory_name: The factory name. :type factory_name: str :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. + setting in integration runtime if provided. :type compute_type: str :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. + in integration runtime if provided. :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int @@ -143,6 +142,7 @@ def begin_create( :param properties: Integration runtime properties. :type properties: ~data_factory_management_client.models.IntegrationRuntime :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
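# The pollers above now follow azure-core's save/resume pattern: capture the poller's
# continuation token, persist it, and pass it back later as the `continuation_token` keyword to
# re-attach without re-issuing the initial request. `client` is assumed to be an already
# constructed DataFactoryManagementClient exposing this group as `data_flow_debug_session`;
# resource names are placeholders.
poller = client.data_flow_debug_session.begin_create(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    compute_type="General",
    core_count=8,
    time_to_live=60,
)
token = poller.continuation_token()    # a plain string, safe to persist

resumed = client.data_flow_debug_session.begin_create(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    continuation_token=token,          # skips the initial call and re-attaches to the LRO
)
session = resumed.result()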
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -157,17 +157,19 @@ def begin_create( 'polling_interval', self._config.polling_interval ) - raw_result = self._create_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + compute_type=compute_type, + core_count=core_count, + time_to_live=time_to_live, + name=name, + properties=properties, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -182,7 +184,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore def query_by_factory( @@ -209,6 +219,10 @@ def query_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -222,15 +236,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -267,9 +277,8 @@ def add_data_flow( source_settings=None, # type: Optional[List["models.DataFlowSourceSetting"]] parameters=None, # type: Optional[Dict[str, object]] dataset_parameters=None, # type: Optional[object] - folder_path=None, # type: Optional[str] + folder_path=None, # type: Optional[object] reference_name=None, # type: Optional[str] - parameter_value_specification_parameters=None, # type: Optional[Dict[str, object]] name=None, # type: Optional[str] properties=None, # type: Optional["models.DataFlow"] **kwargs # type: Any @@ -293,12 +302,11 @@ def add_data_flow( :type parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. :type dataset_parameters: object - :param folder_path: Folder path for staging blob. 
- :type folder_path: str + :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :type folder_path: object :param reference_name: Reference LinkedService name. :type reference_name: str - :param parameter_value_specification_parameters: Arguments for LinkedService. - :type parameter_value_specification_parameters: dict[str, object] :param name: The resource name. :type name: str :param properties: Data flow properties. @@ -312,7 +320,7 @@ def add_data_flow( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, parameters_staging_linked_service_parameters=parameter_value_specification_parameters, name=name, properties=properties) + request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -334,9 +342,8 @@ def add_data_flow( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DataFlowDebugPackage') + body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -380,7 +387,7 @@ def delete( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) + request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -401,9 +408,8 @@ def delete( header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DeleteDataFlowDebugSessionRequest') + body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -428,12 +434,12 @@ def _execute_command_initial( command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] **kwargs # type: Any ): - # type: (...) -> "models.DataFlowDebugCommandResponse" - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"] + # type: (...) 
-> Optional["models.DataFlowDebugCommandResponse"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) + request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -455,9 +461,8 @@ def _execute_command_initial( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_request, 'DataFlowDebugCommandRequest') + body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -491,7 +496,7 @@ def begin_execute_command( command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.DataFlowDebugCommandResponse"] """Execute a data flow debug command. :param resource_group_name: The resource group name. @@ -505,6 +510,7 @@ def begin_execute_command( :param command_payload: The command payload object. :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -519,15 +525,17 @@ def begin_execute_command( 'polling_interval', self._config.polling_interval ) - raw_result = self._execute_command_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._execute_command_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + session_id=session_id, + command=command, + command_payload=command_payload, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -542,5 +550,13 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py index f576e949ebc..e0bd3be1783 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py @@ -77,7 +77,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _data_flow = models.DataFlowResource(properties=properties) + data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -102,9 +102,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_data_flow, 'DataFlowResource') + body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -173,7 +172,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -233,7 +231,6 @@ def delete( # Construct headers header_parameters = 
{} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -271,6 +268,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -284,15 +285,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py index 85f93fe5c3c..2f866416c74 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py @@ -69,6 +69,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -82,15 +86,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -149,7 +149,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _dataset = models.DatasetResource(properties=properties) + dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -174,9 +174,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_dataset, 'DatasetResource') + body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, 
header_parameters, **body_content_kwargs) @@ -203,7 +202,7 @@ def get( if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.DatasetResource" + # type: (...) -> Optional["models.DatasetResource"] """Gets a dataset. :param resource_group_name: The resource group name. @@ -220,7 +219,7 @@ def get( :rtype: ~data_factory_management_client.models.DatasetResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -245,7 +244,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -307,7 +305,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index ad1e328374e..d2667ffac81 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -17,7 +17,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -69,7 +69,7 @@ def get_feature_value( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -90,9 +90,8 @@ def get_feature_value( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_exposure_control_request, 'ExposureControlRequest') + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -139,7 +138,7 @@ def get_feature_value_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - 
_exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -161,9 +160,8 @@ def get_feature_value_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_exposure_control_request, 'ExposureControlRequest') + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -181,3 +179,70 @@ def get_feature_value_by_factory( return deserialized get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore + + def query_feature_value_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + exposure_control_requests, # type: List["models.ExposureControlRequest"] + **kwargs # type: Any + ): + # type: (...) -> "models.ExposureControlBatchResponse" + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param exposure_control_requests: List of exposure control features. + :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.query_feature_value_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 
'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py index 4d88a08da23..ca2ecba0545 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py @@ -18,7 +18,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -63,6 +63,10 @@ def list( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore @@ -74,15 +78,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -134,7 +134,7 @@ def configure_factory_repo( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) + factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -155,9 +155,8 @@ def configure_factory_repo( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request 
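# Caller-side sketch of the new batch exposure-control operation defined above; it wraps the
# request list in an ExposureControlBatchRequest and POSTs to .../queryFeaturesValue. The client
# handle, operation-group attribute, feature name and response attribute are assumptions.
from data_factory_management_client.models import ExposureControlRequest  # assumed package path

batch = client.exposure_control.query_feature_value_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    exposure_control_requests=[
        ExposureControlRequest(feature_name="ADFIntegrationRuntimeSharingRbac", feature_type="Feature"),
    ],
)
for item in batch.exposure_control_responses:   # attribute name assumed from the batch response model
    print(item.feature_name, item.value)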
body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory_repo_update, 'FactoryRepoUpdate') + body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -197,6 +196,10 @@ def list_by_resource_group( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_resource_group.metadata['url'] # type: ignore @@ -209,15 +212,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -254,6 +253,7 @@ def create_or_update( identity=None, # type: Optional["models.FactoryIdentity"] repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] + public_network_access=None, # type: Optional[Union[str, "models.PublicNetworkAccess"]] **kwargs # type: Any ): # type: (...) -> "models.Factory" @@ -276,6 +276,9 @@ def create_or_update( :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. 
+ :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory @@ -285,7 +288,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) + factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters, public_network_access=public_network_access) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -309,9 +312,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory, 'Factory') + body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -358,7 +360,7 @@ def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) + factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -380,9 +382,8 @@ def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_factory_update_parameters, 'FactoryUpdateParameters') + body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -408,7 +409,7 @@ def get( if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Factory" + # type: (...) -> Optional["models.Factory"] """Gets a factory. :param resource_group_name: The resource group name. 
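# Sketch of the new `public_network_access` argument threaded through factory create_or_update
# above; it is forwarded onto the Factory model. The enum values ("Enabled"/"Disabled"), the
# `client.factory` attribute name and the resource names are assumptions for illustration.
factory = client.factory.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    location="East US",
    public_network_access="Disabled",   # restrict the factory to private access (assumed value)
)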
@@ -423,7 +424,7 @@ def get( :rtype: ~data_factory_management_client.models.Factory or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -447,7 +448,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -505,7 +505,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -550,7 +549,7 @@ def get_git_hub_access_token( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) + git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -572,9 +571,8 @@ def get_git_hub_access_token( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_git_hub_access_token_request, 'GitHubAccessTokenRequest') + body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -634,7 +632,7 @@ def get_data_plane_access( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) + policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -656,9 +654,8 @@ def get_data_plane_access( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_policy, 'UserAccessPolicy') + body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, 
**body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py index da9139dba1f..a7903633080 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py @@ -92,7 +92,6 @@ def get( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -156,7 +155,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -202,7 +200,7 @@ def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) + update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -226,9 +224,8 @@ def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -295,7 +292,6 @@ def get_ip_address( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index 22ef4854565..461ab7b6539 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -53,8 +53,8 @@ def _refresh_initial( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.SsisObjectMetadataStatusResponse" - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"] + # type: (...) 
-> Optional["models.SsisObjectMetadataStatusResponse"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -77,7 +77,6 @@ def _refresh_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -103,7 +102,7 @@ def begin_refresh( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.SsisObjectMetadataStatusResponse"] """Refresh a SSIS integration runtime object metadata. :param resource_group_name: The resource group name. @@ -113,6 +112,7 @@ def begin_refresh( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -127,13 +127,15 @@ def begin_refresh( 'polling_interval', self._config.polling_interval ) - raw_result = self._refresh_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._refresh_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -148,7 +150,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore def get( @@ -160,7 +170,8 @@ def get( **kwargs # type: Any ): # type: (...) -> "models.SsisObjectMetadataListResponse" - """Get a SSIS integration runtime object metadata by specified path. The return is pageable metadata list. + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. :param resource_group_name: The resource group name. 
:type resource_group_name: str @@ -179,7 +190,7 @@ def get( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) + get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -202,10 +213,9 @@ def get( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - if _get_metadata_request is not None: - body_content = self._serialize.body(_get_metadata_request, 'GetSsisObjectMetadataRequest') + if get_metadata_request is not None: + body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') else: body_content = None body_content_kwargs['content'] = body_content diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py index b3b65b26183..1fb5fc6b30d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py @@ -71,6 +71,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -84,15 +88,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -151,7 +151,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _integration_runtime = models.IntegrationRuntimeResource(properties=properties) + integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -176,9 +176,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_integration_runtime, 'IntegrationRuntimeResource') + body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -205,7 +204,7 @@ def get( 
if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.IntegrationRuntimeResource" + # type: (...) -> Optional["models.IntegrationRuntimeResource"] """Gets an integration runtime. :param resource_group_name: The resource group name. @@ -223,7 +222,7 @@ def get( :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -248,7 +247,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -300,7 +298,7 @@ def update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) + update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -323,9 +321,8 @@ def update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) @@ -387,7 +384,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -445,7 +441,6 @@ def get_status( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -470,7 +465,8 @@ def get_connection_info( **kwargs # type: Any ): # type: (...) -> "models.IntegrationRuntimeConnectionInfo" - """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. + """Gets the on-premises integration runtime connection information for encrypting the on-premises + data source credentials. :param resource_group_name: The resource group name. 
:type resource_group_name: str @@ -506,7 +502,6 @@ def get_connection_info( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -551,7 +546,7 @@ def regenerate_auth_key( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) + regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -574,9 +569,8 @@ def regenerate_auth_key( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -639,7 +633,6 @@ def list_auth_key( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -663,8 +656,8 @@ def _start_initial( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + # type: (...) -> Optional["models.IntegrationRuntimeStatusResponse"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -687,7 +680,6 @@ def _start_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -713,7 +705,7 @@ def begin_start( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"] """Starts a ManagedReserved type integration runtime. :param resource_group_name: The resource group name. @@ -723,6 +715,7 @@ def begin_start( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
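# --- Editor's illustrative sketch; not part of the generated diff. ---
# The begin_start/begin_stop/begin_refresh methods in this change now accept a
# `continuation_token` keyword and, when one is supplied, rebuild the poller via
# LROPoller.from_continuation_token instead of re-sending the initial request.
# A minimal, hedged sketch of saving and resuming a start operation follows;
# `client` is assumed to be an already-constructed instance of the generated
# DataFactoryManagementClient, and `integration_runtime` is assumed to be the
# attribute this operation group is attached to.
poller = client.integration_runtime.begin_start(
    "exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime"
)
token = poller.continuation_token()  # persist this string somewhere durable

# Later, possibly in a different process, resume polling instead of re-issuing the call:
resumed = client.integration_runtime.begin_start(
    "exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime",
    continuation_token=token,
)
status_response = resumed.result()  # blocks until the long-running operation completes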
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -737,13 +730,15 @@ def begin_start( 'polling_interval', self._config.polling_interval ) - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -758,7 +753,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore def _stop_initial( @@ -791,7 +794,6 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -812,7 +814,7 @@ def begin_stop( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller[None] """Stops a ManagedReserved type integration runtime. :param resource_group_name: The resource group name. @@ -822,6 +824,7 @@ def begin_stop( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -836,13 +839,15 @@ def begin_stop( 'polling_interval', self._config.polling_interval ) - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -854,7 +859,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore def sync_credentials( @@ -865,7 +878,10 @@ def sync_credentials( **kwargs # type: Any ): # type: (...) -> None - """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node than using this API directly. + """Force the integration runtime to synchronize credentials across integration runtime nodes, and + this will override the credentials across all worker nodes with those available on the + dispatcher node. If you already have the latest credential backup file, you should manually + import it (preferred) on any self-hosted integration runtime node than using this API directly. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -900,7 +916,6 @@ def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -922,7 +937,8 @@ def get_monitoring_data( **kwargs # type: Any ): # type: (...) -> "models.IntegrationRuntimeMonitoringData" - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes + under this integration runtime. :param resource_group_name: The resource group name. 
:type resource_group_name: str @@ -958,7 +974,6 @@ def get_monitoring_data( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1018,7 +1033,6 @@ def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1041,7 +1055,8 @@ def remove_link( **kwargs # type: Any ): # type: (...) -> None - """Remove all linked integration runtimes under specific data factory in a self-hosted integration runtime. + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -1060,7 +1075,7 @@ def remove_link( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) + linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -1082,9 +1097,8 @@ def remove_link( header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -1140,7 +1154,7 @@ def create_linked_integration_runtime( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) + create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -1163,9 +1177,8 @@ def create_linked_integration_runtime( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, 
**body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py index 39b7df21557..7124cb588eb 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py @@ -69,6 +69,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -82,15 +86,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -149,7 +149,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _linked_service = models.LinkedServiceResource(properties=properties) + linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -174,9 +174,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') + body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -203,7 +202,7 @@ def get( if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.LinkedServiceResource" + # type: (...) -> Optional["models.LinkedServiceResource"] """Gets a linked service. :param resource_group_name: The resource group name. 
@@ -221,7 +220,7 @@ def get( :rtype: ~data_factory_management_client.models.LinkedServiceResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -246,7 +245,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -308,7 +306,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py new file mode 100644 index 00000000000..29be0bd0e6d --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py @@ -0,0 +1,344 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagedPrivateEndpointOperations(object): + """ManagedPrivateEndpointOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"] + """Lists managed private endpoints. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + if_match=None, # type: Optional[str] + connection_state=None, # type: Optional["models.ConnectionStateProperties"] + fqdns=None, # type: Optional[List[str]] + group_id=None, # type: Optional[str] + private_link_resource_id=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedPrivateEndpointResource" + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :param connection_state: The managed private endpoint connection state. + :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :param fqdns: Fully qualified domain names. + :type fqdns: list[str] + :param group_id: The groupId to which the managed private endpoint is created. + :type group_id: str + :param private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. 
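# --- Editor's illustrative sketch; not part of the generated diff. ---
# create_or_update in this new operations class flattens the endpoint payload into
# keyword arguments (connection_state, fqdns, group_id, private_link_resource_id)
# and builds the ManagedPrivateEndpointResource internally. Hedged example;
# `client` and the `managed_private_endpoint` attribute name are assumptions, and
# the storage account resource ID below is a placeholder.
endpoint = client.managed_private_endpoint.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="default",
    managed_private_endpoint_name="exampleManagedPrivateEndpoint",
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/"
        "exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage"
    ),
)
print(endpoint.name)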
+ :type private_link_resource_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + 
managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedPrivateEndpointResource" + """Gets a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py 
new file mode 100644 index 00000000000..fa043ca3e59 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py @@ -0,0 +1,262 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagedVirtualNetworkOperations(object): + """ManagedVirtualNetworkOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"] + """Lists managed Virtual Networks. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + properties, # type: "models.ManagedVirtualNetwork" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedVirtualNetworkResource" + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param properties: Managed Virtual Network properties. 
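# --- Editor's illustrative sketch; not part of the generated diff. ---
# Unlike the managed private endpoint operations, create_or_update here still takes a
# `properties` model (ManagedVirtualNetwork) and wraps it in a
# ManagedVirtualNetworkResource before serialization. Hedged example; `client`, the
# `managed_virtual_network` attribute name, and the `models` import path are
# assumptions that depend on how the vendored SDK is packaged.
from data_factory_management_client import models

vnet = client.managed_virtual_network.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetwork",
    properties=models.ManagedVirtualNetwork(),
)
# list_by_factory returns an ItemPaged iterator over the individual resources:
for existing in client.managed_virtual_network.list_by_factory(
    "exampleResourceGroup", "exampleFactoryName"
):
    print(existing.name)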
+ :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> "models.ManagedVirtualNetworkResource" + """Gets a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py index dac22c7ca25..c5cf3d43f6d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py @@ -63,6 +63,10 @@ def list( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore @@ -70,15 +74,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py index 26f3b683429..d82f423f2cb 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py @@ -69,6 +69,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -82,15 +86,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -172,7 +172,6 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content @@ -201,7 +200,7 @@ def get( if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.PipelineResource" + # type: (...) -> Optional["models.PipelineResource"] """Gets a pipeline. :param resource_group_name: The resource group name. 
@@ -218,7 +217,7 @@ def get( :rtype: ~data_factory_management_client.models.PipelineResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -243,7 +242,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -305,7 +303,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -393,7 +390,6 @@ def create_run( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: body_content = self._serialize.body(parameters, '{object}') diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py index fb7f49f8589..75634fde5ac 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py @@ -51,7 +51,7 @@ def query_by_factory( factory_name, # type: str last_updated_after, # type: datetime.datetime last_updated_before, # type: datetime.datetime - continuation_token=None, # type: Optional[str] + continuation_token_parameter=None, # type: Optional[str] filters=None, # type: Optional[List["models.RunQueryFilter"]] order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] **kwargs # type: Any @@ -69,9 +69,9 @@ def query_by_factory( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. 
@@ -85,7 +85,7 @@ def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -107,9 +107,8 @@ def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -172,7 +171,6 @@ def get( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -238,7 +236,6 @@ def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py index f35784604bf..142f32f2c31 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py @@ -71,6 +71,10 @@ def list_by_factory( api_version = "2018-06-01" def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] # type: ignore @@ -84,15 +88,11 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) + request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): @@ -123,7 +123,7 @@ def query_by_factory( self, resource_group_name, # type: str factory_name, # type: str - continuation_token=None, # type: Optional[str] + continuation_token_parameter=None, # type: Optional[str] 
parent_trigger_name=None, # type: Optional[str] **kwargs # type: Any ): @@ -134,9 +134,9 @@ def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun triggers. :type parent_trigger_name: str @@ -149,7 +149,7 @@ def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -171,9 +171,8 @@ def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'TriggerFilterParameters') + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -224,7 +223,7 @@ def create_or_update( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _trigger = models.TriggerResource(properties=properties) + trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -249,9 +248,8 @@ def create_or_update( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_trigger, 'TriggerResource') + body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) @@ -278,7 +276,7 @@ def get( if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.TriggerResource" + # type: (...) -> Optional["models.TriggerResource"] """Gets a trigger. :param resource_group_name: The resource group name. 
@@ -295,7 +293,7 @@ def get( :rtype: ~data_factory_management_client.models.TriggerResource or None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -320,7 +318,6 @@ def get( header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -382,7 +379,6 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -403,8 +399,8 @@ def _subscribe_to_event_initial( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.TriggerSubscriptionOperationStatus" - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -427,7 +423,6 @@ def _subscribe_to_event_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -453,7 +448,7 @@ def begin_subscribe_to_event( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] """Subscribe event trigger to events. :param resource_group_name: The resource group name. @@ -463,6 +458,7 @@ def begin_subscribe_to_event( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -477,13 +473,15 @@ def begin_subscribe_to_event( 'polling_interval', self._config.polling_interval ) - raw_result = self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._subscribe_to_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -498,7 +496,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore def get_event_subscription_status( @@ -545,7 +551,6 @@ def get_event_subscription_status( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -569,8 +574,8 @@ def _unsubscribe_from_event_initial( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.TriggerSubscriptionOperationStatus" - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -593,7 +598,6 @@ def _unsubscribe_from_event_initial( header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -619,7 +623,7 @@ def begin_unsubscribe_from_event( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] """Unsubscribe event trigger from events. :param resource_group_name: The resource group name. @@ -629,6 +633,7 @@ def begin_unsubscribe_from_event( :param trigger_name: The trigger name. 
:type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -643,13 +648,15 @@ def begin_unsubscribe_from_event( 'polling_interval', self._config.polling_interval ) - raw_result = self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._unsubscribe_from_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -664,7 +671,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore def _start_initial( @@ -697,7 +712,6 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -718,7 +732,7 @@ def begin_start( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller[None] """Starts a trigger. :param resource_group_name: The resource group name. @@ -728,6 +742,7 @@ def begin_start( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -742,13 +757,15 @@ def begin_start( 'polling_interval', self._config.polling_interval ) - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -760,7 +777,15 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore def _stop_initial( @@ -793,7 +818,6 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -814,7 +838,7 @@ def begin_stop( trigger_name, # type: str **kwargs # type: Any ): - # type: (...) -> LROPoller + # type: (...) -> LROPoller[None] """Stops a trigger. :param resource_group_name: The resource group name. @@ -824,6 +848,7 @@ def begin_stop( :param trigger_name: The trigger name. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod @@ -838,13 +863,15 @@ def begin_stop( 'polling_interval', self._config.polling_interval ) - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) @@ -856,5 +883,13 @@ def get_long_running_output(pipeline_response): if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py index 643da837195..3290d8196ab 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py @@ -92,7 +92,6 @@ def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] - # Construct and send request request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -106,13 +105,73 @@ def rerun( rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + def cancel( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Cancel a single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + def query_by_factory( self, resource_group_name, # type: str factory_name, # type: str last_updated_after, # type: datetime.datetime last_updated_before, # type: datetime.datetime - continuation_token=None, # type: Optional[str] + continuation_token_parameter=None, # type: Optional[str] filters=None, # type: Optional[List["models.RunQueryFilter"]] order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] **kwargs # type: Any @@ -130,9 +189,9 @@ def query_by_factory( :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: ~datetime.datetime - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str + :param continuation_token_parameter: The continuation token for getting the next page of + results. Null for first page. + :type continuation_token_parameter: str :param filters: List of filters. :type filters: list[~data_factory_management_client.models.RunQueryFilter] :param order_by: List of OrderBy option. 
@@ -146,7 +205,7 @@ def query_by_factory( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _filter_parameters = models.RunFilterParameters(continuation_token=continuation_token, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) + filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -168,9 +227,8 @@ def query_by_factory( header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' - # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_filter_parameters, 'RunFilterParameters') + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py deleted file mode 100644 index 8503c7d7e65..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# coding: utf-8 - -from setuptools import setup, find_packages - -NAME = "datafactorymanagementclient" -VERSION = "1.0.0" - -# To install the library, run the following -# -# python setup.py install -# -# prerequisite: setuptools -# http://pypi.python.org/pypi/setuptools - -REQUIRES = ["msrest>=0.6.0", "azure-core<2.0.0,>=1.2.0"] - -setup( - name=NAME, - version=VERSION, - description="DataFactoryManagementClient", - author_email="", - url="", - keywords=["Swagger", "DataFactoryManagementClient"], - install_requires=REQUIRES, - packages=find_packages(), - include_package_data=True, - long_description="""\ - The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. - """ -) diff --git a/src/datafactory/report.md b/src/datafactory/report.md index dc10a57884b..af93d925485 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -1,20 +1,143 @@ # Azure CLI Module Creation Report -### datafactory activity-run query-by-pipeline-run - -query-by-pipeline-run a datafactory activity-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory activity-run|ActivityRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-pipeline-run|QueryByPipelineRun| - -#### Parameters +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az datafactory|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az datafactory` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az datafactory factory|Factories|[commands](#CommandsInFactories)| +|az datafactory integration-runtime|IntegrationRuntimes|[commands](#CommandsInIntegrationRuntimes)| +|az datafactory integration-runtime-node|IntegrationRuntimeNodes|[commands](#CommandsInIntegrationRuntimeNodes)| +|az datafactory linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| +|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| +|az datafactory pipeline|Pipelines|[commands](#CommandsInPipelines)| +|az datafactory pipeline-run|PipelineRuns|[commands](#CommandsInPipelineRuns)| +|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| +|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| +|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| + +## COMMANDS +### Commands in `az datafactory activity-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory activity-run query-by-pipeline-run](#ActivityRunsQueryByPipelineRun)|QueryByPipelineRun|[Parameters](#ParametersActivityRunsQueryByPipelineRun)|[Example](#ExamplesActivityRunsQueryByPipelineRun)| + +### Commands in `az datafactory dataset` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory dataset list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| +|[az datafactory dataset show](#DatasetsGet)|Get|[Parameters](#ParametersDatasetsGet)|[Example](#ExamplesDatasetsGet)| +|[az datafactory dataset create](#DatasetsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatasetsCreateOrUpdate#Create)|[Example](#ExamplesDatasetsCreateOrUpdate#Create)| +|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|[Example](#ExamplesDatasetsCreateOrUpdate#Update)| +|[az datafactory dataset delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| + +### Commands in `az datafactory factory` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory factory list](#FactoriesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersFactoriesListByResourceGroup)|[Example](#ExamplesFactoriesListByResourceGroup)| +|[az datafactory factory list](#FactoriesList)|List|[Parameters](#ParametersFactoriesList)|[Example](#ExamplesFactoriesList)| +|[az datafactory factory show](#FactoriesGet)|Get|[Parameters](#ParametersFactoriesGet)|[Example](#ExamplesFactoriesGet)| +|[az datafactory factory create](#FactoriesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersFactoriesCreateOrUpdate#Create)|[Example](#ExamplesFactoriesCreateOrUpdate#Create)| +|[az datafactory factory 
update](#FactoriesUpdate)|Update|[Parameters](#ParametersFactoriesUpdate)|[Example](#ExamplesFactoriesUpdate)| +|[az datafactory factory delete](#FactoriesDelete)|Delete|[Parameters](#ParametersFactoriesDelete)|[Example](#ExamplesFactoriesDelete)| +|[az datafactory factory configure-factory-repo](#FactoriesConfigureFactoryRepo)|ConfigureFactoryRepo|[Parameters](#ParametersFactoriesConfigureFactoryRepo)|[Example](#ExamplesFactoriesConfigureFactoryRepo)| +|[az datafactory factory get-data-plane-access](#FactoriesGetDataPlaneAccess)|GetDataPlaneAccess|[Parameters](#ParametersFactoriesGetDataPlaneAccess)|[Example](#ExamplesFactoriesGetDataPlaneAccess)| +|[az datafactory factory get-git-hub-access-token](#FactoriesGetGitHubAccessToken)|GetGitHubAccessToken|[Parameters](#ParametersFactoriesGetGitHubAccessToken)|[Example](#ExamplesFactoriesGetGitHubAccessToken)| + +### Commands in `az datafactory integration-runtime` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime list](#IntegrationRuntimesListByFactory)|ListByFactory|[Parameters](#ParametersIntegrationRuntimesListByFactory)|[Example](#ExamplesIntegrationRuntimesListByFactory)| +|[az datafactory integration-runtime show](#IntegrationRuntimesGet)|Get|[Parameters](#ParametersIntegrationRuntimesGet)|[Example](#ExamplesIntegrationRuntimesGet)| +|[az datafactory integration-runtime linked-integration-runtime create](#IntegrationRuntimesCreateLinkedIntegrationRuntime)|CreateLinkedIntegrationRuntime|[Parameters](#ParametersIntegrationRuntimesCreateLinkedIntegrationRuntime)|[Example](#ExamplesIntegrationRuntimesCreateLinkedIntegrationRuntime)| +|[az datafactory integration-runtime managed create](#IntegrationRuntimesCreateOrUpdate#Create#Managed)|CreateOrUpdate#Create#Managed|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#Managed)|Not Found| +|[az datafactory integration-runtime self-hosted create](#IntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|CreateOrUpdate#Create#SelfHosted|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|[Example](#ExamplesIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)| +|[az datafactory integration-runtime update](#IntegrationRuntimesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimesUpdate)|[Example](#ExamplesIntegrationRuntimesUpdate)| +|[az datafactory integration-runtime delete](#IntegrationRuntimesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimesDelete)|[Example](#ExamplesIntegrationRuntimesDelete)| +|[az datafactory integration-runtime get-connection-info](#IntegrationRuntimesGetConnectionInfo)|GetConnectionInfo|[Parameters](#ParametersIntegrationRuntimesGetConnectionInfo)|[Example](#ExamplesIntegrationRuntimesGetConnectionInfo)| +|[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)| +|[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)| +|[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)| +|[az datafactory integration-runtime 
regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)| +|[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)| +|[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)| +|[az datafactory integration-runtime stop](#IntegrationRuntimesStop)|Stop|[Parameters](#ParametersIntegrationRuntimesStop)|[Example](#ExamplesIntegrationRuntimesStop)| +|[az datafactory integration-runtime sync-credentials](#IntegrationRuntimesSyncCredentials)|SyncCredentials|[Parameters](#ParametersIntegrationRuntimesSyncCredentials)|[Example](#ExamplesIntegrationRuntimesSyncCredentials)| +|[az datafactory integration-runtime upgrade](#IntegrationRuntimesUpgrade)|Upgrade|[Parameters](#ParametersIntegrationRuntimesUpgrade)|[Example](#ExamplesIntegrationRuntimesUpgrade)| + +### Commands in `az datafactory integration-runtime-node` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime-node show](#IntegrationRuntimeNodesGet)|Get|[Parameters](#ParametersIntegrationRuntimeNodesGet)|[Example](#ExamplesIntegrationRuntimeNodesGet)| +|[az datafactory integration-runtime-node update](#IntegrationRuntimeNodesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimeNodesUpdate)|[Example](#ExamplesIntegrationRuntimeNodesUpdate)| +|[az datafactory integration-runtime-node delete](#IntegrationRuntimeNodesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimeNodesDelete)|[Example](#ExamplesIntegrationRuntimeNodesDelete)| +|[az datafactory integration-runtime-node get-ip-address](#IntegrationRuntimeNodesGetIpAddress)|GetIpAddress|[Parameters](#ParametersIntegrationRuntimeNodesGetIpAddress)|[Example](#ExamplesIntegrationRuntimeNodesGetIpAddress)| + +### Commands in `az datafactory linked-service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| +|[az datafactory linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| +|[az datafactory linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| +|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Update)| +|[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| + +### Commands in `az datafactory pipeline` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline list](#PipelinesListByFactory)|ListByFactory|[Parameters](#ParametersPipelinesListByFactory)|[Example](#ExamplesPipelinesListByFactory)| +|[az datafactory 
pipeline show](#PipelinesGet)|Get|[Parameters](#ParametersPipelinesGet)|[Example](#ExamplesPipelinesGet)| +|[az datafactory pipeline create](#PipelinesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPipelinesCreateOrUpdate#Create)|[Example](#ExamplesPipelinesCreateOrUpdate#Create)| +|[az datafactory pipeline update](#PipelinesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPipelinesCreateOrUpdate#Update)|[Example](#ExamplesPipelinesCreateOrUpdate#Update)| +|[az datafactory pipeline delete](#PipelinesDelete)|Delete|[Parameters](#ParametersPipelinesDelete)|[Example](#ExamplesPipelinesDelete)| +|[az datafactory pipeline create-run](#PipelinesCreateRun)|CreateRun|[Parameters](#ParametersPipelinesCreateRun)|[Example](#ExamplesPipelinesCreateRun)| + +### Commands in `az datafactory pipeline-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline-run show](#PipelineRunsGet)|Get|[Parameters](#ParametersPipelineRunsGet)|[Example](#ExamplesPipelineRunsGet)| +|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| +|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| + +### Commands in `az datafactory trigger` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| +|[az datafactory trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)| +|[az datafactory trigger create](#TriggersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersTriggersCreateOrUpdate#Create)|[Example](#ExamplesTriggersCreateOrUpdate#Create)| +|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|[Example](#ExamplesTriggersCreateOrUpdate#Update)| +|[az datafactory trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)| +|[az datafactory trigger get-event-subscription-status](#TriggersGetEventSubscriptionStatus)|GetEventSubscriptionStatus|[Parameters](#ParametersTriggersGetEventSubscriptionStatus)|[Example](#ExamplesTriggersGetEventSubscriptionStatus)| +|[az datafactory trigger query-by-factory](#TriggersQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggersQueryByFactory)|[Example](#ExamplesTriggersQueryByFactory)| +|[az datafactory trigger start](#TriggersStart)|Start|[Parameters](#ParametersTriggersStart)|[Example](#ExamplesTriggersStart)| +|[az datafactory trigger stop](#TriggersStop)|Stop|[Parameters](#ParametersTriggersStop)|[Example](#ExamplesTriggersStop)| +|[az datafactory trigger subscribe-to-event](#TriggersSubscribeToEvents)|SubscribeToEvents|[Parameters](#ParametersTriggersSubscribeToEvents)|[Example](#ExamplesTriggersSubscribeToEvents)| +|[az datafactory trigger unsubscribe-from-event](#TriggersUnsubscribeFromEvents)|UnsubscribeFromEvents|[Parameters](#ParametersTriggersUnsubscribeFromEvents)|[Example](#ExamplesTriggersUnsubscribeFromEvents)| + +### Commands in `az datafactory trigger-run` group +|CLI Command|Operation Swagger 
name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger-run cancel](#TriggerRunsCancel)|Cancel|[Parameters](#ParametersTriggerRunsCancel)|[Example](#ExamplesTriggerRunsCancel)| +|[az datafactory trigger-run query-by-factory](#TriggerRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggerRunsQueryByFactory)|[Example](#ExamplesTriggerRunsQueryByFactory)| +|[az datafactory trigger-run rerun](#TriggerRunsRerun)|Rerun|[Parameters](#ParametersTriggerRunsRerun)|[Example](#ExamplesTriggerRunsRerun)| + + +## COMMAND DETAILS + +### group `az datafactory activity-run` +#### Command `az datafactory activity-run query-by-pipeline-run` + +##### Example +``` +az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" --last-updated-after \ +"2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -26,129 +149,133 @@ query-by-pipeline-run a datafactory activity-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory dataset create - -create a datafactory dataset. +### group `az datafactory dataset` +#### Command `az datafactory dataset list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +##### Example +``` +az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory dataset show` -#### Parameters +##### Example +``` +az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--properties**|object|Dataset properties.|properties|properties| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory dataset delete - -delete a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory dataset create` + +##### Example +``` +az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\\":\\"LinkedSe\ +rviceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Str\ +ing\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"TextFormat\\"},\ +\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\\"type\\":\\"Ex\ +pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--properties**|object|Dataset properties.|properties|properties| +|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory dataset list - -list a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| +#### Command `az datafactory dataset update` -#### Parameters +##### Example +``` +az datafactory dataset update --description "Example description" --linked-service-name "{\\"type\\":\\"LinkedServiceRe\ +ference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters "{\\"MyFileName\\":{\\"type\\":\\"String\\"},\ +\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory dataset show - -show a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| +|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Dataset description.|description|description| +|**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| +|**--schema**|any|Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| +|**--parameters**|dictionary|Parameters for dataset.|parameters|parameters| +|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| +|**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| + +#### Command `az datafactory dataset delete` + +##### Example +``` +az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory factory configure-factory-repo -configure-factory-repo a datafactory factory. +### group `az datafactory factory` +#### Command `az datafactory factory list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|configure-factory-repo|ConfigureFactoryRepo| - -#### Parameters +##### Example +``` +az datafactory factory list --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| -|**--location**|string|The location identifier.|location|locationId| -|**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId| -|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| -|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -### datafactory factory create +#### Command `az datafactory factory list` -create a datafactory factory. +##### Example +``` +az datafactory factory list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az datafactory factory show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory factory create` -#### Parameters +##### Example +``` +az datafactory factory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -159,42 +286,60 @@ create a datafactory factory. |**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| |**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| |**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| +|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess| -### datafactory factory delete - -delete a datafactory factory. +#### Command `az datafactory factory update` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--tags**|dictionary|The resource tags.|tags|tags| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory factory delete` -#### Parameters +##### Example +``` +az datafactory factory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -### datafactory factory get-data-plane-access +#### Command `az datafactory factory configure-factory-repo` -get-data-plane-access a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc\ +/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ +--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ +repository-name="repo" root-folder="/" tenant-id="" --location "East US" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The location identifier.|location|locationId| +|**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId| +|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| +|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-data-plane-access|GetDataPlaneAccess| +#### Command `az datafactory factory get-data-plane-access` -#### Parameters +##### Example +``` +az datafactory factory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" --expire-time \ +"2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ +"2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -205,21 +350,14 @@ get-data-plane-access a datafactory factory. |**--start-time**|string|Start time for the token. If not specified the current time will be used.|start_time|startTime| |**--expire-time**|string|Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours.|expire_time|expireTime| -### datafactory factory get-git-hub-access-token - -get-git-hub-access-token a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-git-hub-access-token|GetGitHubAccessToken| +#### Command `az datafactory factory get-git-hub-access-token` -#### Parameters +##### Example +``` +az datafactory factory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -228,455 +366,297 @@ get-git-hub-access-token a datafactory factory. |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| -### datafactory factory list - -list a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByResourceGroup| -|list|List| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| - -### datafactory factory show - -show a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory integration-runtime` +#### Command `az datafactory integration-runtime list` -#### Parameters +##### Example +``` +az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory factory update - -update a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +#### Command `az datafactory integration-runtime show` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--tags**|dictionary|The resource tags.|tags|tags| - -### datafactory integration-runtime delete - -delete a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory integration-runtime linked-integration-runtime create` + +##### Example +``` +az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" \ +--location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-id \ +"061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --subscription-id "12345678-1234-1234-1234-12345678\ +abc" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--name**|string|The name of the linked integration runtime.|name|name| +|**--subscription-id**|string|The ID of the subscription that the linked integration runtime belongs to.|subscription_id|subscriptionId| +|**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| +|**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| -### datafactory integration-runtime get-connection-info - -get-connection-info a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-connection-info|GetConnectionInfo| +#### Command `az datafactory integration-runtime managed create` -#### Parameters +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|managed_description|description| +|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| +|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| -### datafactory integration-runtime get-monitoring-data - -get-monitoring-data a datafactory integration-runtime. 
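Note: `az datafactory integration-runtime managed create`, documented above, is the only command in this group emitted without an example. A minimal sketch, assuming the option names listed in its parameter table and an illustrative `computeProperties` payload (the `location`, `nodeSize`, `numberOfNodes`, and `maxParallelExecutionsPerNode` fields follow the IntegrationRuntimeComputeProperties schema and are not taken from this report):

```
# Illustrative only: option names come from the parameter table above; the compute-properties JSON is an assumption.
az datafactory integration-runtime managed create --factory-name "exampleFactoryName" \
--resource-group "exampleResourceGroup" --integration-runtime-name "exampleManagedIntegrationRuntime" \
--description "A managed integration runtime" \
--type-properties-compute-properties "{\"location\":\"East US\",\"nodeSize\":\"Standard_D2_v3\",\"numberOfNodes\":1,\"maxParallelExecutionsPerNode\":2}"
```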
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-monitoring-data|GetMonitoringData| +#### Command `az datafactory integration-runtime self-hosted create` -#### Parameters +##### Example +``` +az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description "A selfhosted \ +integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|self_hosted_description|description| +|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| -### datafactory integration-runtime get-status - -get-status a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-status|GetStatus| +#### Command `az datafactory integration-runtime update` -#### Parameters +##### Example +``` +az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| +|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| -### datafactory integration-runtime linked-integration-runtime create - -linked-integration-runtime create a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|linked-integration-runtime create|CreateLinkedIntegrationRuntime| +#### Command `az datafactory integration-runtime delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--name**|string|The name of the linked integration runtime.|name|name| -|**--subscription-id**|string|The ID of the subscription that the linked integration runtime belongs to.|subscription_id|subscriptionId| -|**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| -|**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| - -### datafactory integration-runtime list -list a datafactory integration-runtime. +#### Command `az datafactory integration-runtime get-connection-info` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +##### Example +``` +az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime list-auth-key - -list-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list-auth-key|ListAuthKeys| +#### Command `az datafactory integration-runtime get-monitoring-data` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime managed create - -managed create a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime get-status` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|managed create|CreateOrUpdate#Create#Managed| - -#### Parameters +##### Example +``` +az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|managed_description|description| -|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| -|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| - -### datafactory integration-runtime regenerate-auth-key - -regenerate-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|regenerate-auth-key|RegenerateAuthKey| +#### Command `az datafactory integration-runtime list-auth-key` -#### Parameters +##### Example +``` +az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| - -### datafactory integration-runtime remove-link -remove-link a datafactory integration-runtime. 
+#### Command `az datafactory integration-runtime regenerate-auth-key` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|remove-link|RemoveLinks| - -#### Parameters +##### Example +``` +az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| - -### datafactory integration-runtime self-hosted create - -self-hosted create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|self-hosted create|CreateOrUpdate#Create#SelfHosted| +#### Command `az datafactory integration-runtime remove-link` -#### Parameters +##### Example +``` +az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|self_hosted_description|description| -|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| - -### datafactory integration-runtime show - -show a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory integration-runtime start` -#### Parameters +##### Example +``` +az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory integration-runtime start -start a datafactory integration-runtime. +#### Command `az datafactory integration-runtime stop` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| - -#### Parameters +##### Example +``` +az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime stop - -stop a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime sync-credentials` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime sync-credentials - -sync-credentials a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|sync-credentials|SyncCredentials| +#### Command `az datafactory integration-runtime upgrade` -#### Parameters +##### Example +``` +az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime update - -update a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +### group `az datafactory integration-runtime-node` +#### Command `az datafactory integration-runtime-node show` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node show --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| -|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| - -### datafactory integration-runtime upgrade - -upgrade a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|upgrade|Upgrade| +#### Command `az datafactory integration-runtime-node update` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node update --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" --concurrent-jobs-limit 2 +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| +|**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| -### datafactory integration-runtime-node delete - -delete a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory integration-runtime-node delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -684,21 +664,14 @@ delete a datafactory integration-runtime-node. |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node get-ip-address +#### Command `az datafactory integration-runtime-node get-ip-address` -get-ip-address a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-ip-address|GetIpAddress| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -706,66 +679,44 @@ get-ip-address a datafactory integration-runtime-node. 
|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node show - -show a datafactory integration-runtime-node. +### group `az datafactory linked-service` +#### Command `az datafactory linked-service list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| - -### datafactory integration-runtime-node update -update a datafactory integration-runtime-node. +#### Command `az datafactory linked-service show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -|**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| - -### datafactory linked-service create - -create a datafactory linked-service. +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| +|**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +#### Command `az datafactory linked-service create` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory linked-service create --factory-name "exampleFactoryName" --properties "{\\"type\\":\\"AzureStorage\\",\ +\\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=htt\ +ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -774,226 +725,179 @@ create a datafactory linked-service. |**--properties**|object|Properties of linked service.|properties|properties| |**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory linked-service delete - -delete a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory linked-service update` -#### Parameters +##### Example +``` +az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example description" --name \ +"exampleLinkedService" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| - -### datafactory linked-service list - -list a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +|**--if-match**|string|ETag of the linkedService entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--connect-via**|object|The integration runtime reference.|connect_via|connectVia| +|**--description**|string|Linked service description.|description|description| +|**--parameters**|dictionary|Parameters for linked service.|parameters|parameters| +|**--annotations**|array|List of tags that can be used for describing the linked service.|annotations|annotations| + +#### Command `az datafactory linked-service delete` + +##### Example +``` +az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -### datafactory linked-service show - -show a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory pipeline` +#### Command `az datafactory pipeline list` -#### Parameters +##### Example +``` +az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory pipeline create -create a datafactory pipeline. +#### Command `az datafactory pipeline show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| -|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory pipeline create-run - -create-run a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create-run|CreateRun| +|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory pipeline create` + +##### Example +``` +az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline "{\\"activities\\":[{\\"name\\":\\"Exampl\ +eForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCopyActivity\ +\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":\\"exampl\ +econtainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"outputs\\":[\ +{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@item\ +()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"typeProperties\\":{\\"d\ +ataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"BlobSource\\"}}}],\\"isSeq\ +uential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}\ +],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}},\\"variabl\ +es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\"JobId\\":{\\"type\\":\\"Expression\\",\ +\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| -|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| -|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| -|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| -|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| - -### datafactory pipeline delete - -delete a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| +|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Parameters +#### Command `az datafactory pipeline update` + +##### Example +``` +az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" --activities \ +"[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\"\ +:\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"\ +MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDatas\ +et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\ +\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\\ +"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ +obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ +OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| - -### datafactory pipeline list - -list a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +|**--if-match**|string|ETag of the pipeline entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|The description of the pipeline.|description|description| +|**--activities**|array|List of activities in pipeline.|activities|activities| +|**--parameters**|dictionary|List of parameters for pipeline.|parameters|parameters| +|**--variables**|dictionary|List of variables for pipeline.|variables|variables| +|**--concurrency**|integer|The max number of concurrent runs for the pipeline.|concurrency|concurrency| +|**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations| +|**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| +|**--folder-name**|string|The name of the folder that this Pipeline is in.|name|name| + +#### Command `az datafactory pipeline delete` + +##### Example +``` +az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -### datafactory pipeline show - -show a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +#### Command `az datafactory pipeline create-run` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory pipeline update - -update a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| +|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| +|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| +|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. 
The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| +|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +### group `az datafactory pipeline-run` +#### Command `az datafactory pipeline-run show` -#### Parameters +##### Example +``` +az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|The description of the pipeline.|pipeline_description|description| -|**--activities**|array|List of activities in pipeline.|pipeline_activities|activities| -|**--parameters**|dictionary|List of parameters for pipeline.|pipeline_parameters|parameters| -|**--variables**|dictionary|List of variables for pipeline.|pipeline_variables|variables| -|**--concurrency**|integer|The max number of concurrent runs for the pipeline.|pipeline_concurrency|concurrency| -|**--annotations**|array|List of tags that can be used for describing the Pipeline.|pipeline_annotations|annotations| -|**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|pipeline_run_dimensions|runDimensions| -|**--folder-name**|string|The name of the folder that this Pipeline is in.|pipeline_name_properties_folder_name|name| - -### datafactory pipeline-run cancel - -cancel a datafactory pipeline-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| +#### Command `az datafactory pipeline-run cancel` -#### Parameters +##### Example +``` +az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1001,21 +905,15 @@ cancel a datafactory pipeline-run. |**--run-id**|string|The pipeline run identifier.|run_id|runId| |**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -### datafactory pipeline-run query-by-factory - -query-by-factory a datafactory pipeline-run. 
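The pipeline-run examples above use literal run IDs. In practice, the identifier returned by `az datafactory pipeline create-run` is captured and passed to `pipeline-run show` or `pipeline-run cancel`. A sketch of that flow, assuming a Bash shell and that the create-run response exposes the identifier as `runId` (as in the REST API), using the CLI's standard `--query`/`--output` options:

```
# Start a run and capture its ID from the create-run response (assumes Bash; `runId` is the REST response property).
runId=$(az datafactory pipeline create-run --factory-name "exampleFactoryName" \
--resource-group "exampleResourceGroup" --name "examplePipeline" \
--parameters "{\"OutputBlobNameList\":[\"exampleoutput.csv\"]}" \
--query runId --output tsv)

# Inspect the run, and cancel it if necessary.
az datafactory pipeline-run show --factory-name "exampleFactoryName" \
--resource-group "exampleResourceGroup" --run-id "$runId"
az datafactory pipeline-run cancel --factory-name "exampleFactoryName" \
--resource-group "exampleResourceGroup" --run-id "$runId"
```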
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory pipeline-run query-by-factory` -#### Parameters +##### Example +``` +az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters operand="PipelineName" \ +operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1026,42 +924,45 @@ query-by-factory a datafactory pipeline-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory pipeline-run show - -show a datafactory pipeline-run. +### group `az datafactory trigger` +#### Command `az datafactory trigger list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| - -### datafactory trigger create -create a datafactory trigger. +#### Command `az datafactory trigger show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +##### Example +``` +az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory trigger create` + +##### Example +``` +az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --properties \ +"{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\\ +"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typePro\ +perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\ +\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1070,83 +971,59 @@ create a datafactory trigger. |**--properties**|object|Properties of the trigger.|properties|properties| |**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory trigger delete - -delete a datafactory trigger. +#### Command `az datafactory trigger update` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--description "Example description" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Trigger description.|description|description| +|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger get-event-subscription-status - -get-event-subscription-status a datafactory trigger. 
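The escaped one-line JSON in the `trigger create` example above is what the generator emits, but the same payload is easier to maintain when kept readable. The variant below is a convenience sketch, not a generated example: it builds the identical ScheduleTrigger definition in a here-document and passes it to `--properties` as a single argument.
```
# Same payload as the generated trigger create example, without the backslash
# escaping. The JSON content is copied from the example above.
trigger_properties=$(cat <<'JSON'
{
  "type": "ScheduleTrigger",
  "pipelines": [
    {
      "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]},
      "pipelineReference": {"type": "PipelineReference", "referenceName": "examplePipeline"}
    }
  ],
  "typeProperties": {
    "recurrence": {
      "frequency": "Minute",
      "interval": 4,
      "startTime": "2018-06-16T00:39:13.8441801Z",
      "endTime": "2018-06-16T00:55:13.8441801Z",
      "timeZone": "UTC"
    }
  }
}
JSON
)

az datafactory trigger create \
    --factory-name "exampleFactoryName" \
    --resource-group "exampleResourceGroup" \
    --name "exampleTrigger" \
    --properties "$trigger_properties"
```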
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-event-subscription-status|GetEventSubscriptionStatus| +#### Command `az datafactory trigger delete` -#### Parameters +##### Example +``` +az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger list - -list a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| +#### Command `az datafactory trigger get-event-subscription-status` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +##### Example +``` +az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger query-by-factory - -query-by-factory a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory trigger query-by-factory` -#### Parameters +##### Example +``` +az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "exampleTrigger" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1154,127 +1031,87 @@ query-by-factory a datafactory trigger. |**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| |**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| -### datafactory trigger show - -show a datafactory trigger. 
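`get-event-subscription-status` is most useful when checked across every trigger in a factory. The loop below is illustrative only; it assumes `trigger list` returns objects with a top-level `name` property and that the status operation returns a `status` field (as the TriggerSubscriptionOperationStatus model does in the REST API), and that trigger names contain no whitespace.
```
# Sketch: print the event-subscription status of each trigger in the factory.
for trigger in $(az datafactory trigger list \
        --factory-name "exampleFactoryName" \
        --resource-group "exampleResourceGroup" \
        --query "[].name" --output tsv); do
    az datafactory trigger get-event-subscription-status \
        --factory-name "exampleFactoryName" \
        --resource-group "exampleResourceGroup" \
        --name "$trigger" \
        --query status --output tsv
done
```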
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory trigger start` -#### Parameters +##### Example +``` +az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory trigger start - -start a datafactory trigger. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| +#### Command `az datafactory trigger stop` -#### Parameters +##### Example +``` +az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger stop +#### Command `az datafactory trigger subscribe-to-event` -stop a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger subscribe-to-event - -subscribe-to-event a datafactory trigger. 
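`start` and `stop` usually bracket other trigger operations, since the Data Factory service generally rejects edits to a trigger that is still running. The sequence below composes only commands documented in this report; the description text is a placeholder.
```
# Sketch: stop the trigger, apply an update, then start it again.
az datafactory trigger stop \
    --factory-name "exampleFactoryName" \
    --resource-group "exampleResourceGroup" \
    --name "exampleTrigger"

az datafactory trigger update \
    --factory-name "exampleFactoryName" \
    --resource-group "exampleResourceGroup" \
    --name "exampleTrigger" \
    --description "Updated while the trigger was stopped"

az datafactory trigger start \
    --factory-name "exampleFactoryName" \
    --resource-group "exampleResourceGroup" \
    --name "exampleTrigger"
```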
+#### Command `az datafactory trigger unsubscribe-from-event` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|subscribe-to-event|SubscribeToEvents| - -#### Parameters +##### Example +``` +az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger unsubscribe-from-event - -unsubscribe-from-event a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|unsubscribe-from-event|UnsubscribeFromEvents| +### group `az datafactory trigger-run` +#### Command `az datafactory trigger-run cancel` -#### Parameters +##### Example +``` +az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| -### datafactory trigger-run query-by-factory - -query-by-factory a datafactory trigger-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| +#### Command `az datafactory trigger-run query-by-factory` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| - -#### Parameters +##### Example +``` +az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters operand="TriggerName" \ +operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1285,21 +1122,14 @@ query-by-factory a datafactory trigger-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory trigger-run rerun - -rerun a datafactory trigger-run. 
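The `trigger-run query-by-factory` example above can be narrowed on the client side with the CLI's global `--query` option. The sketch below rests on an assumption about the response shape: that runs are wrapped in a `value` array whose items carry a `triggerRunId`, as the TriggerRunsQueryResponse model does in the REST API, so the IDs can be fed into `trigger-run cancel` or `trigger-run rerun`.
```
# Sketch: list the trigger run IDs for a trigger within a time window.
# The JMESPath path value[].triggerRunId is inferred from the REST API model.
az datafactory trigger-run query-by-factory \
    --factory-name "exampleFactoryName" \
    --resource-group "exampleResourceGroup" \
    --filters operand="TriggerName" operator="Equals" values="exampleTrigger" \
    --last-updated-after "2018-06-16T00:36:44.3345758Z" \
    --last-updated-before "2018-06-16T00:49:48.3686473Z" \
    --query "value[].triggerRunId" --output tsv
```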
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|rerun|Rerun| +#### Command `az datafactory trigger-run rerun` -#### Parameters +##### Example +``` +az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py index 63351e791f3..a26a3db9b84 100644 --- a/src/datafactory/setup.py +++ b/src/datafactory/setup.py @@ -12,7 +12,7 @@ # HISTORY.rst entry. VERSION = '0.1.0' try: - from .manual.version import VERSION + from azext_datafactory.manual.version import VERSION except ImportError: pass @@ -31,8 +31,9 @@ ] DEPENDENCIES = [] + try: - from .manual.dependency import DEPENDENCIES + from azext_datafactory.manual.dependency import DEPENDENCIES except ImportError: pass
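The setup.py changes switch the optional `manual` imports to absolute package paths, which matters once the extension is built and installed as a wheel rather than run from source. A quick local check, sketched below under stated assumptions, builds the wheel and loads it into the CLI; the `src/datafactory` path comes from this diff, while the wheel file name is inferred from the `VERSION = '0.1.0'` above and may differ.
```
# Sketch: build the extension wheel and exercise a documented command.
cd src/datafactory
pip install wheel
python setup.py bdist_wheel
az extension add --source dist/datafactory-*.whl
az datafactory factory list --resource-group "exampleResourceGroup"
```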