diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst
new file mode 100644
index 00000000000..1c139576ba0
--- /dev/null
+++ b/src/machinelearningservices/HISTORY.rst
@@ -0,0 +1,8 @@
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md
new file mode 100644
index 00000000000..20da88b1045
--- /dev/null
+++ b/src/machinelearningservices/README.md
@@ -0,0 +1,866 @@
+# Azure CLI machinelearningservices Extension #
+This is the Azure CLI extension for the machinelearningservices resource provider.
+
+### How to use ###
+Install this extension using the CLI command below:
+```
+az extension add --name machinelearningservices
+```
+
+### Included Features ###
+#### machinelearningservices workspace ####
+##### Create #####
+```
+az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" \
+ --description "test description" \
+ --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/microsoft.insights/components/testinsights" \
+ --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+ --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+ --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" \
+ --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" \
+ --sku name="Basic" tier="Basic" --resource-group "workspace-1234" --name "testworkspace"
+
+az machinelearningservices workspace wait --created --resource-group "{rg}" --name "{myWorkspace}"
+```
+##### Show #####
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Update #####
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+ --sku name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List-key #####
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Resync-key #####
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+#### machinelearningservices workspace-feature ####
+##### List #####
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+#### machinelearningservices usage ####
+##### List #####
+```
+az machinelearningservices usage list --location "eastus"
+```
+#### machinelearningservices virtual-machine-size ####
+##### List #####
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+#### machinelearningservices quota ####
+##### List #####
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Update #####
+```
+az machinelearningservices quota update --location "eastus" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+```
+#### machinelearningservices machine-learning-compute ####
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Update #####
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" \
+ --scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List-key #####
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List-node #####
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Restart #####
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Start #####
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Stop #####
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+ --underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+#### machinelearningservices ####
+##### List-sku #####
+```
+az machinelearningservices list-sku
+```
+#### machinelearningservices private-endpoint-connection ####
+##### Put #####
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+ --private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices private-link-resource ####
+##### List #####
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices linked-service ####
+##### Create #####
+```
+az machinelearningservices linked-service create --link-name "link-1" --name "link-1" --type "SystemAssigned" \
+ --location "westus" \
+ --properties linked-service-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1/providers/Microsoft.Synapse/workspaces/Syn-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices linked-service show --link-name "link-1" --resource-group "resourceGroup-1" \
+ --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices linked-service list --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices linked-service delete --link-name "link-1" --resource-group "resourceGroup-1" \
+ --workspace-name "workspace-1"
+```
+#### machinelearningservices machine-learning-service ####
+##### Create #####
+```
+az machinelearningservices machine-learning-service create \
+ --properties "{\\"appInsightsEnabled\\":true,\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1},\\"environmentImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\"AzureML-Scikit-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/base:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\"environmentVariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpreterPath\\":\\"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":true,\\"repositories\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeType\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\"eastus2\\"}" \
+ --resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name "service123" \
+ --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name "service123" \
+ --workspace-name "workspaces123"
+```
+#### machinelearningservices notebook ####
+##### List-key #####
+```
+az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Prepare #####
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+#### machinelearningservices workspace-connection ####
+##### Create #####
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+ --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" \
+ --target "www.facebook.com" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+#### machinelearningservices code-container ####
+##### Create #####
+```
+az machinelearningservices code-container create --name "testContainer" \
+ --properties description="string" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices code-container list --skiptoken "skiptoken" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices code-version ####
+##### Create #####
+```
+az machinelearningservices code-version create --name "testContainer" \
+ --properties description="string" assetPath={"path":"string","isDirectory":true} datastoreId="string" properties={"prop1":"value1","prop2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices code-version list --name "testContainer" --skiptoken "skiptoken" \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices component-container ####
+##### Create #####
+```
+az machinelearningservices component-container create --name "testContainer" \
+ --properties description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices component-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices component-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices component-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices component-version ####
+##### Create #####
+```
+az machinelearningservices component-version create --name "testContainer" \
+ --properties description="string" codeConfiguration={"codeArtifactId":"string","command":"string"} component={"componentType":"CommandComponent","displayName":"string","inputs":{"additionalProp1":{"description":"string","default":"string","componentInputType":"Generic","dataType":"string","optional":true},"additionalProp2":{"description":"string","default":"string","componentInputType":"Generic","dataType":"string","optional":true},"additionalProp3":{"description":"string","default":"string","componentInputType":"Generic","dataType":"string","optional":true}},"isDeterministic":true,"outputs":{"additionalProp1":{"description":"string","dataType":"string"},"additionalProp2":{"description":"string","dataType":"string"},"additionalProp3":{"description":"string","dataType":"string"}}} environmentId="\\"/subscriptions/{{subscriptionId}}/resourceGroups/{{resourceGroup}}/providers/Microsoft.MachineLearningServices/workspaces/{{workspaceName}}/Environments/AzureML-Minimal\\"" generatedBy="User" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices component-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices component-version list --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices component-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices data-container ####
+##### Create #####
+```
+az machinelearningservices data-container create --name "datacontainer123" \
+ --properties description="string" properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices data-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices datastore ####
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"azureDataLake":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"storeName":"string"},"azureMySql":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azurePostgreSql":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","enableSSL":true,"endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azureSqlDatabase":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azureStorage":{"accountName":"string","blobCacheTimeout":0,"containerName":"string","credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"endpoint":"core.windows.net","protocol":"https"},"datastoreContentsType":"AzureBlob","glusterFs":{"serverAddress":"string","volumeName":"string"}} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### List-secret #####
+```
+az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices data-version ####
+##### Create #####
+```
+az machinelearningservices data-version create --name "dataset123" \
+ --properties description="string" assetPath={"path":"string","isDirectory":false} datasetType="Simple" datastoreId="string" properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --version "456" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version "456" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" --version "456" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices environment-container ####
+##### Create #####
+```
+az machinelearningservices environment-container create --name "testContainer" \
+ --properties description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices environment-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices environment-container list --skiptoken "skiptoken" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices environment-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices environment-specification-version ####
+##### Create #####
+```
+az machinelearningservices environment-specification-version create --name "testContainer" \
+ --properties description="string" condaFile="string" docker={"dockerSpecificationType":"Build","dockerfile":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices environment-specification-version show --name "testContainer" --resource-group "testrg123" \
+ --version "1" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices environment-specification-version list --name "testContainer" --skiptoken "skiptoken" \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices environment-specification-version delete --name "testContainer" \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+#### machinelearningservices job ####
+##### Create #####
+```
+az machinelearningservices job create \
+ --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" \
+ --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices job create \
+ --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" \
+ --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Cancel #####
+```
+az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Cancel #####
+```
+az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices labeling-job ####
+##### Create #####
+```
+az machinelearningservices labeling-job create \
+ --properties description="string" datasetConfiguration={"assetName":"string","datasetVersion":"string","incrementalDatasetRefreshEnabled":true} jobInstructions={"uri":"string"} jobType="Labeling" labelCategories={"additionalProp1":{"allowMultiSelect":true,"classes":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"},"additionalProp2":{"allowMultiSelect":true,"classes":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"},"additionalProp3":{"allowMultiSelect":true,"classes":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"}} labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingComputeBinding":{"computeId":"string","nodeCount":0},"mlAssistEnabled":true,"trainingComputeBinding":{"computeId":"string","nodeCount":0}} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --id "testLabelingJob" --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+ --include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices labeling-job list --skiptoken "skiptoken" --count "10" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Export-label #####
+```
+az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Pause #####
+```
+az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Resume #####
+```
+az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices model-container ####
+##### Create #####
+```
+az machinelearningservices model-container create --name "testContainer" \
+ --properties description="Model container description" tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices model-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices model-version ####
+##### Create #####
+```
+az machinelearningservices model-version create --name "testContainer" \
+ --properties description="Model version description" assetPath={"path":"LocalUpload/12345/some/path","isDirectory":true} datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg123/providers/Microsoft.MachineLearningServices/workspaces/workspace123/datastores/datastore123" properties={"prop1":"value1","prop2":"value2"} stage="Production" tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --version "999" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" --version "999" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" --version "999" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" --version "999" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices online-deployment ####
+##### Create #####
+```
+az machinelearningservices online-deployment create \
+ --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties description="string" codeConfiguration={"codeArtifactId":"string","command":"string"} deploymentConfiguration={"appInsightsEnabled":true,"computeType":"Managed","maxConcurrentRequestsPerInstance":0,"maxQueueWaitMs":0,"scoringTimeoutMs":0} environmentId="string" modelReference={"assetId":"string","referenceType":"Id"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} scaleSettings={"instanceCount":0,"maximum":0,"minimum":0,"scaleType":"Automatic"} \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Update #####
+```
+az machinelearningservices online-deployment update \
+ --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" \
+ --kind "string" \
+ --deployment-configuration "{\\"appInsightsEnabled\\":true,\\"computeType\\":\\"Managed\\",\\"maxConcurrentRequestsPerInstance\\":0,\\"maxQueueWaitMs\\":0,\\"scoringTimeoutMs\\":0}" \
+ --scale-settings instance-count=0 maximum=0 minimum=0 scale-type="Automatic" \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Get-log #####
+```
+az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+#### machinelearningservices online-endpoint ####
+##### Create #####
+```
+az machinelearningservices online-endpoint create \
+ --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties description="string" authMode="AMLToken" computeConfiguration={"computeType":"Managed"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} trafficRules={"additionalProp1":0,"additionalProp2":0,"additionalProp3":0} \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Update #####
+```
+az machinelearningservices online-endpoint update \
+ --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" \
+ --kind "string" --traffic-rules additionalProp1=0 additionalProp2=0 additionalProp3=0 \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Get-token #####
+```
+az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List-key #####
+```
+az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Regenerate-key #####
+```
+az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" \
+ --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py
new file mode 100644
index 00000000000..b234b2a3aa6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl
+ machinelearningservices_custom = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.custom#{}',
+ client_factory=cf_machinelearningservices_cl)
+ parent = super(AzureMachineLearningWorkspacesCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom)
+
+ def load_command_table(self, args):
+ from azext_machinelearningservices.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_machinelearningservices.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader
diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py
new file mode 100644
index 00000000000..d95d53bf711
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/action.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
new file mode 100644
index 00000000000..cfc30c747c7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
@@ -0,0 +1,4 @@
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
+}
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py
new file mode 100644
index 00000000000..dbe9d5f9742
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/custom.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.custom import * # noqa: F403
+try:
+ from .manual.custom import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
new file mode 100644
index 00000000000..94bba280ecb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
@@ -0,0 +1,124 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_machinelearningservices_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_machinelearningservices.vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces
+ return get_mgmt_service_client(cli_ctx,
+ AzureMachineLearningWorkspaces)
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspaces
+
+
+def cf_workspace_feature(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_features
+
+
+def cf_usage(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).usages
+
+
+def cf_virtual_machine_size(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).virtual_machine_sizes
+
+
+def cf_quota(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).quotas
+
+
+def cf_machine_learning_compute(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).machine_learning_compute
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connections
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_link_resources
+
+
+def cf_linked_service(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).linked_services
+
+
+def cf_machine_learning_service(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).machine_learning_service
+
+
+def cf_notebook(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).notebooks
+
+
+def cf_workspace_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_connections
+
+
+def cf_code_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).code_containers
+
+
+def cf_code_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).code_versions
+
+
+def cf_component_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).component_containers
+
+
+def cf_component_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).component_versions
+
+
+def cf_data_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).data_containers
+
+
+def cf_datastore(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).datastores
+
+
+def cf_data_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).data_versions
+
+
+def cf_environment_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).environment_containers
+
+
+def cf_environment_specification_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).environment_specification_versions
+
+
+def cf_job(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).jobs
+
+
+def cf_labeling_job(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).labeling_jobs
+
+
+def cf_model_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).model_containers
+
+
+def cf_model_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).model_versions
+
+
+def cf_online_deployment(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).online_deployments
+
+
+def cf_online_endpoint(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).online_endpoints
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
new file mode 100644
index 00000000000..0c0bb63f377
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
@@ -0,0 +1,2408 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list'] = """
+ type: command
+    short-summary: "Lists all the available machine learning workspaces under the specified resource group, and \
+lists all the available machine learning workspaces under the specified subscription."
+ examples:
+ - name: Get Workspaces by Resource Group
+ text: |-
+ az machinelearningservices workspace list --resource-group "workspace-1234"
+ - name: Get Workspaces by subscription
+ text: |-
+ az machinelearningservices workspace list
+"""
+
+helps['machinelearningservices workspace show'] = """
+ type: command
+ short-summary: "Gets the properties of the specified machine learning workspace."
+ examples:
+ - name: Get Workspace
+ text: |-
+ az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace create'] = """
+ type: command
+ short-summary: "Create a workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --shared-private-link-resources
+ short-summary: "The list of shared private link resources in this workspace."
+ long-summary: |
+ Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \
+status=XX
+
+ name: Unique name of the private link.
+ private-link-resource-id: The resource id that private link links to.
+ group-id: The private link resource group id.
+ request-message: Request message.
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+
+ Multiple actions can be specified by using more than one --shared-private-link-resources argument.
+ - name: --key-vault-properties
+ short-summary: "Customer Key vault properties."
+ long-summary: |
+ Usage: --key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX
+
+ key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present.
+ key-identifier: Required. Key vault uri to access the encryption key.
+ identity-client-id: For future use - The client id of the identity which will be used to access key vault.
+ examples:
+ - name: Create Workspace
+ text: |-
+ az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" \
+--description "test description" --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGr\
+oups/workspace-1234/providers/microsoft.insights/components/testinsights" --container-registry \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistr\
+y/registries/testRegistry" --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/\
+keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/re\
+sourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" \
+--hbi-workspace false --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/pr\
+oviders/Microsoft.KeyVault/vaults/testkv" --shared-private-link-resources name="testdbresource" \
+private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/M\
+icrosoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please \
+approve" status="Approved" --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accoun\
+tcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" --sku name="Basic" tier="Basic" \
+--resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace update'] = """
+ type: command
+ short-summary: "Updates a machine learning workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Update Workspace
+ text: |-
+ az machinelearningservices workspace update --description "new description" --friendly-name "New \
+friendly name" --sku name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace delete'] = """
+ type: command
+ short-summary: "Deletes a machine learning workspace."
+ examples:
+ - name: Delete Workspace
+ text: |-
+ az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-key'] = """
+ type: command
+ short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \
+insights and password for container registry."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace resync-key'] = """
+ type: command
+ short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \
+app insights and password for container registry."
+ examples:
+ - name: Resync Workspace Keys
+ text: |-
+ az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+created.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--created
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+deleted.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--deleted
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+ type: group
+ short-summary: Manage workspace feature with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+ type: command
+ short-summary: "Lists all enabled features for a workspace."
+ examples:
+ - name: List Workspace features
+ text: |-
+ az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices usage'] = """
+ type: group
+ short-summary: Manage usage with machinelearningservices
+"""
+
+helps['machinelearningservices usage list'] = """
+ type: command
+ short-summary: "Gets the current usage information as well as limits for AML resources for given subscription and \
+location."
+ examples:
+ - name: List Usages
+ text: |-
+ az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+ type: group
+ short-summary: Manage virtual machine size with machinelearningservices
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+ type: command
+ short-summary: "Returns supported VM Sizes in a location."
+ examples:
+ - name: List VM Sizes
+ text: |-
+ az machinelearningservices virtual-machine-size list --location "eastus"
+"""
+
+helps['machinelearningservices quota'] = """
+ type: group
+ short-summary: Manage quota with machinelearningservices
+"""
+
+helps['machinelearningservices quota list'] = """
+ type: command
+ short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+ examples:
+ - name: List workspace quotas by VMFamily
+ text: |-
+ az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+ type: command
+ short-summary: "Update quota for each VM family in workspace."
+ parameters:
+ - name: --value
+ short-summary: "The list for update quota."
+ long-summary: |
+ Usage: --value id=XX type=XX limit=XX unit=XX location=XX
+
+ id: Specifies the resource ID.
+ type: Specifies the resource type.
+ limit: The maximum permitted quota of the resource.
+ unit: An enum describing the unit of quota measurement.
+ location: Region of the AML workspace in the id.
+
+ Multiple actions can be specified by using more than one --value argument.
+ examples:
+ - name: update quotas
+ text: |-
+ az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServi\
+ces/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Ma\
+chineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 \
+unit="Count" --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0\
+000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standa\
+rd_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices machine-learning-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices
+"""
+
+helps['machinelearningservices machine-learning-compute list'] = """
+ type: command
+ short-summary: "Gets computes in specified workspace."
+ examples:
+ - name: Get Computes
+ text: |-
+ az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute show'] = """
+ type: command
+ short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not \
+returned - use 'keys' nested resource to get them."
+ examples:
+    - name: Get an AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+    - name: Get an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+    - name: Get a ComputeInstance
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aks
+"""
+
+helps['machinelearningservices machine-learning-compute aks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"re\
+moteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeI\
+dleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000\
+000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/\
+versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"pe\
+rsonal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-00000000\
+0000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disable\
+d\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aml-compute
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\
+\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNode\
+Count\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000\
+-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images\
+/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthoriz\
+ationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-\
+0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAc\
+cess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group compute-instance
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\
+\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"min\
+NodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0000\
+0000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/im\
+ages/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAut\
+horizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-\
+0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPub\
+licAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-factory
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-lake-analytics
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute databricks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group databricks
+"""
+
+helps['machinelearningservices machine-learning-compute databricks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group hd-insight
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --administrator-account
+ short-summary: "Admin credentials for master node of the cluster"
+ long-summary: |
+ Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+ username: Username of admin account
+ password: Password of admin account
+ public-key-data: Public key data
+ private-key-data: Private key data
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group virtual-machine
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --administrator-account
+ short-summary: "Admin credentials for virtual machine"
+ long-summary: |
+ Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+ username: Username of admin account
+ password: Password of admin account
+ public-key-data: Public key data
+ private-key-data: Private key data
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+    - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute update'] = """
+ type: command
+ short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+ parameters:
+ - name: --scale-settings
+ short-summary: "Desired scale settings for the amlCompute."
+ long-summary: |
+ Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+ max-node-count: Required. Max number of nodes to use
+ min-node-count: Min number of nodes to use
+ node-idle-time-before-scale-down: Node Idle Time before scaling down amlCompute. This string needs to be \
+in the RFC Format.
+ examples:
+ - name: Update a AmlCompute Compute
+ text: |-
+ az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute delete'] = """
+ type: command
+ short-summary: "Deletes specified Machine Learning compute."
+ examples:
+ - name: Delete Compute
+ text: |-
+ az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group \
+"testrg123" --underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-key'] = """
+ type: command
+ short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc)."
+ examples:
+ - name: List AKS Compute Keys
+ text: |-
+ az machinelearningservices machine-learning-compute list-key --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-node'] = """
+ type: command
+ short-summary: "Get the details (e.g. IP address, port, etc.) of all the compute nodes in the compute."
+ examples:
+ - name: Get compute nodes information for a compute
+ text: |-
+ az machinelearningservices machine-learning-compute list-node --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute restart'] = """
+ type: command
+ short-summary: "Posts a restart action to a compute instance."
+ examples:
+ - name: Restart ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute restart --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute start'] = """
+ type: command
+ short-summary: "Posts a start action to a compute instance."
+ examples:
+ - name: Start ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute stop'] = """
+ type: command
+ short-summary: "Posts a stop action to a compute instance."
+ examples:
+ - name: Stop ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-compute is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully created.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully updated.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully deleted.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --deleted
+"""
+
+helps['machinelearningservices'] = """
+ type: group
+ short-summary: Manage machinelearningservices
+"""
+
+helps['machinelearningservices list-sku'] = """
+ type: command
+ short-summary: "Lists all skus with associated features."
+ examples:
+ - name: List Skus
+ text: |-
+ az machinelearningservices list-sku
+"""
+
+helps['machinelearningservices private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connection with machinelearningservices
+"""
+
+helps['machinelearningservices private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceGetPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceDeletePrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection put'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+ examples:
+ - name: WorkspacePutPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resource with machinelearningservices
+"""
+
+helps['machinelearningservices private-link-resource list'] = """
+ type: command
+ short-summary: "Gets the private link resources that need to be created for a workspace."
+ examples:
+ - name: WorkspaceListPrivateLinkResources
+ text: |-
+ az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices linked-service'] = """
+ type: group
+ short-summary: Manage linked service with machinelearningservices
+"""
+
+helps['machinelearningservices linked-service list'] = """
+ type: command
+ short-summary: "List all linked services under an AML workspace."
+ examples:
+ - name: ListLinkedServices
+ text: |-
+ az machinelearningservices linked-service list --resource-group "resourceGroup-1" --workspace-name \
+"workspace-1"
+"""
+
+helps['machinelearningservices linked-service show'] = """
+ type: command
+ short-summary: "Get the detail of a linked service."
+ examples:
+ - name: GetLinkedService
+ text: |-
+ az machinelearningservices linked-service show --link-name "link-1" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices linked-service create'] = """
+ type: command
+ short-summary: "Add a new linked service."
+ parameters:
+ - name: --properties
+ short-summary: "LinkedService specific properties."
+ long-summary: |
+ Usage: --properties linked-service-resource-id=XX created-time=XX modified-time=XX
+
+ linked-service-resource-id: Required. ResourceId of the link target of the linked service.
+ created-time: The creation time of the linked service.
+ modified-time: The last modified time of the linked service.
+ examples:
+ - name: CreateLinkedService
+ text: |-
+ az machinelearningservices linked-service create --link-name "link-1" --name "link-1" --type \
+"SystemAssigned" --location "westus" --properties linked-service-resource-id="/subscriptions/00000000-1111-2222-3333-44\
+4444444444/resourceGroups/resourceGroup-1/providers/Microsoft.Synapse/workspaces/Syn-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices linked-service delete'] = """
+ type: command
+ short-summary: "Delete a linked service."
+ examples:
+ - name: DeleteLinkedService
+ text: |-
+ az machinelearningservices linked-service delete --link-name "link-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices machine-learning-service'] = """
+ type: group
+ short-summary: Manage machine learning service with machinelearningservices
+"""
+
+helps['machinelearningservices machine-learning-service list'] = """
+ type: command
+ short-summary: "Gets services in specified workspace."
+ examples:
+ - name: Get Services
+ text: |-
+ az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service show'] = """
+ type: command
+ short-summary: "Get a Service by name."
+ examples:
+ - name: Get Service
+ text: |-
+ az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service create'] = """
+ type: command
+ short-summary: "Create service. This call will update a service if it exists. This is a nonrecoverable operation. \
+If your intent is to create a new service, do a GET first to verify that it does not exist yet."
+ examples:
+ - name: Create Or Update service
+ text: |-
+ az machinelearningservices machine-learning-service create --properties "{\\"appInsightsEnabled\\":true,\
+\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1}\
+,\\"environmentImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":fal\
+se,\\"url\\":\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\\
+"AzureML-Scikit-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/ba\
+se:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\\
+"environmentVariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\
+\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"c\
+hannels\\":[\\"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml\
+-defaults==1.0.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-trai\
+n-core==1.0.69\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpr\
+eterPath\\":\\"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":tru\
+e,\\"repositories\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeTyp\
+e\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\\
+"eastus2\\"}" --resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service update'] = """
+ type: command
+ short-summary: "Update service. This call will update a service if it exists. This is a nonrecoverable operation. \
+If your intent is to update an existing service, do a GET first to verify that it exists."
+"""
+
+helps['machinelearningservices machine-learning-service delete'] = """
+ type: command
+ short-summary: "Delete a specific Service."
+ examples:
+ - name: Delete Service
+ text: |-
+ az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-service is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-service is \
+successfully created.
+ text: |-
+ az machinelearningservices machine-learning-service wait --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-service is \
+successfully updated.
+ text: |-
+ az machinelearningservices machine-learning-service wait --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123" --updated
+"""
+
+helps['machinelearningservices notebook'] = """
+ type: group
+ short-summary: Manage notebook with machinelearningservices
+"""
+
+helps['machinelearningservices notebook list-key'] = """
+ type: command
+ short-summary: "List keys for the notebook resource of a workspace."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices notebook prepare'] = """
+ type: command
+ short-summary: "Prepare the notebook resource for a workspace."
+ examples:
+ - name: Prepare Notebook
+ text: |-
+ az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+ type: group
+ short-summary: Manage workspace connection with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+ type: command
+ short-summary: "List all connections under an AML workspace."
+ examples:
+ - name: ListWorkspaceConnections
+ text: |-
+ az machinelearningservices workspace-connection list --category "ACR" --resource-group \
+"resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+ type: command
+ short-summary: "Get the detail of a workspace connection."
+ examples:
+ - name: GetWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+ type: command
+ short-summary: "Add a new workspace connection."
+ examples:
+ - name: CreateWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection create --connection-name "connection-1" --name \
+"connection-1" --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+ type: command
+ short-summary: "Delete a workspace connection."
+ examples:
+ - name: DeleteWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+--resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices code-container'] = """
+ type: group
+ short-summary: Manage code container with machinelearningservices
+"""
+
+helps['machinelearningservices code-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Code Container.
+ text: |-
+ az machinelearningservices code-container list --skiptoken "skiptoken" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Code Container.
+ text: |-
+ az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Code Container.
+ text: |-
+ az machinelearningservices code-container create --name "testContainer" --properties \
+description="string" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices code-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices code-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Code Container.
+ text: |-
+ az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version'] = """
+ type: group
+ short-summary: Manage code version with machinelearningservices
+"""
+
+helps['machinelearningservices code-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Code Version.
+ text: |-
+ az machinelearningservices code-version list --name "testContainer" --skiptoken "skiptoken" \
+--resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Code Version.
+ text: |-
+ az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version create'] = """
+ type: command
+ short-summary: "Create version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+ examples:
+ - name: CreateOrUpdate Code Version.
+ text: |-
+ az machinelearningservices code-version create --name "testContainer" --properties description="string" \
+assetPath={"path":"string","isDirectory":true} datastoreId="string" properties={"prop1":"value1","prop2":"value2"} \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version update'] = """
+ type: command
+ short-summary: "Update version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+"""
+
+helps['machinelearningservices code-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Code Version.
+ text: |-
+ az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-container'] = """
+ type: group
+ short-summary: Manage component container with machinelearningservices
+"""
+
+helps['machinelearningservices component-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Component Container.
+ text: |-
+ az machinelearningservices component-container list --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices component-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Component Container.
+ text: |-
+ az machinelearningservices component-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Component Container.
+ text: |-
+ az machinelearningservices component-container create --name "testContainer" --properties \
+description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices component-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Component Container.
+ text: |-
+ az machinelearningservices component-container delete --name "testContainer" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-version'] = """
+ type: group
+ short-summary: Manage component version with machinelearningservices
+"""
+
+helps['machinelearningservices component-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Component Version.
+ text: |-
+ az machinelearningservices component-version list --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Component Version.
+ text: |-
+ az machinelearningservices component-version show --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-version create'] = """
+ type: command
+ short-summary: "Create version."
+ parameters:
+ - name: --code-configuration
+ short-summary: "Code configuration of the job. Includes CodeArtifactId and Command."
+ long-summary: |
+ Usage: --code-configuration code-artifact-id=XX command=XX
+
+ code-artifact-id: The ID of the code asset.
+ command: Required. The command to execute on startup of the job. eg. ["python", "train.py"]
+ examples:
+ - name: CreateOrUpdate Component Version.
+ text: |-
+ az machinelearningservices component-version create --name "testContainer" --properties \
+description="string" codeConfiguration={"codeArtifactId":"string","command":"string"} component={"componentType":"Comma\
+ndComponent","displayName":"string","inputs":{"additionalProp1":{"description":"string","default":"string","componentIn\
+putType":"Generic","dataType":"string","optional":true},"additionalProp2":{"description":"string","default":"string","c\
+omponentInputType":"Generic","dataType":"string","optional":true},"additionalProp3":{"description":"string","default":"\
+string","componentInputType":"Generic","dataType":"string","optional":true}},"isDeterministic":true,"outputs":{"additio\
+nalProp1":{"description":"string","dataType":"string"},"additionalProp2":{"description":"string","dataType":"string"},"\
+additionalProp3":{"description":"string","dataType":"string"}}} environmentId="\\"/subscriptions/{{subscriptionId}}/res\
+ourceGroups/{{resourceGroup}}/providers/Microsoft.MachineLearningServices/workspaces/{{workspaceName}}/Environments/Azu\
+reML-Minimal\\"" generatedBy="User" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3"\
+:"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group \
+"testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices component-version update'] = """
+ type: command
+ short-summary: "Update version."
+ parameters:
+ - name: --code-configuration
+ short-summary: "Code configuration of the job. Includes CodeArtifactId and Command."
+ long-summary: |
+ Usage: --code-configuration code-artifact-id=XX command=XX
+
+ code-artifact-id: The ID of the code asset.
+ command: Required. The command to execute on startup of the job. eg. ["python", "train.py"]
+"""
+
+helps['machinelearningservices component-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Component Version.
+ text: |-
+ az machinelearningservices component-version delete --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices data-container'] = """
+ type: group
+ short-summary: Manage data container with machinelearningservices
+"""
+
+helps['machinelearningservices data-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Data Container.
+ text: |-
+ az machinelearningservices data-container list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices data-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Data Container.
+ text: |-
+ az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Data Container.
+ text: |-
+ az machinelearningservices data-container create --name "datacontainer123" --properties \
+description="string" properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+--resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices data-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Data Container.
+ text: |-
+ az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices datastore'] = """
+ type: group
+ short-summary: Manage datastore with machinelearningservices
+"""
+
+helps['machinelearningservices datastore list'] = """
+ type: command
+ short-summary: "List datastores."
+ examples:
+ - name: List datastores.
+ text: |-
+ az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore show'] = """
+ type: command
+ short-summary: "Get datastore."
+ examples:
+ - name: Get datastore.
+ text: |-
+ az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore create'] = """
+ type: command
+ short-summary: "Create datastore."
+ parameters:
+ - name: --linked-info
+ short-summary: "Information about the datastore origin, if linked."
+ long-summary: |
+ Usage: --linked-info linked-id=XX linked-resource-name=XX origin=XX
+
+ linked-id: Linked service ID.
+ linked-resource-name: Linked service resource name.
+ origin: Type of the linked service.
+ - name: --gluster-fs
+ short-summary: "GlusterFS volume information."
+ long-summary: |
+ Usage: --gluster-fs server-address=XX volume-name=XX
+
+ server-address: Required. GlusterFS server address (can be the IP address or server name).
+ volume-name: Required. GlusterFS volume name.
+ examples:
+ - name: Create or update datastore.
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"azureDataLake":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certifi\
+cate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-\
+b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"serviceP\
+rincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceU\
+ri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"st\
+oreName":"string"},"azureMySql":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","c\
+ertificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717\
+-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"se\
+rvicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","res\
+ourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"\
+}},"databaseName":"string","endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azurePostgreSql":{"\
+credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3\
+fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbpri\
+nt":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"s\
+tring","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa\
+85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","enable\
+SSL":true,"endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azureSqlDatabase":{"credentials":{"a\
+ccountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-456\
+2-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"d\
+atastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId\
+":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-\
+b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","endpoint":"database.wi\
+ndows.net","portNumber":0,"serverName":"string"},"azureStorage":{"accountName":"string","blobCacheTimeout":0,"container\
+Name":"string","credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"strin\
+g","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f6\
+6afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"\
+authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string"\
+,"tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"endpoint":"cor\
+e.windows.net","protocol":"https"},"datastoreContentsType":"AzureBlob","glusterFs":{"serverAddress":"string","volumeNam\
+e":"string"}} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore update'] = """
+ type: command
+ short-summary: "Update datastore."
+ parameters:
+ - name: --linked-info
+ short-summary: "Information about the datastore origin, if linked."
+ long-summary: |
+ Usage: --linked-info linked-id=XX linked-resource-name=XX origin=XX
+
+ linked-id: Linked service ID.
+ linked-resource-name: Linked service resource name.
+ origin: Type of the linked service.
+ - name: --gluster-fs
+ short-summary: "GlusterFS volume information."
+ long-summary: |
+ Usage: --gluster-fs server-address=XX volume-name=XX
+
+ server-address: Required. GlusterFS server address (can be the IP address or server name).
+ volume-name: Required. GlusterFS volume name.
+"""
+
+helps['machinelearningservices datastore delete'] = """
+ type: command
+ short-summary: "Delete datastore."
+ examples:
+ - name: Delete datastore.
+ text: |-
+ az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore list-secret'] = """
+ type: command
+ short-summary: "Get datastore secrets."
+ examples:
+ - name: Get datastore secrets.
+ text: |-
+ az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices data-version'] = """
+ type: group
+ short-summary: Manage data version with machinelearningservices
+"""
+
+helps['machinelearningservices data-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Data Version.
+ text: |-
+ az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Data Version.
+ text: |-
+ az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version \
+"456" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version create'] = """
+ type: command
+ short-summary: "Create version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+ examples:
+ - name: CreateOrUpdate Data Version.
+ text: |-
+ az machinelearningservices data-version create --name "dataset123" --properties description="string" \
+assetPath={"path":"string","isDirectory":false} datasetType="Simple" datastoreId="string" \
+properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} --resource-group \
+"testrg123" --version "456" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version update'] = """
+ type: command
+ short-summary: "Update version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+"""
+
+helps['machinelearningservices data-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Data Version.
+ text: |-
+ az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" \
+--version "456" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices environment-container'] = """
+ type: group
+ short-summary: Manage environment container with machinelearningservices
+"""
+
+helps['machinelearningservices environment-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Environment Container.
+ text: |-
+ az machinelearningservices environment-container list --skiptoken "skiptoken" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Environment Container.
+ text: |-
+ az machinelearningservices environment-container show --name "testContainer" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Environment Container.
+ text: |-
+ az machinelearningservices environment-container create --name "testContainer" --properties \
+description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices environment-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Environment Container.
+ text: |-
+ az machinelearningservices environment-container delete --name "testContainer" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version'] = """
+ type: group
+ short-summary: Manage environment specification version with machinelearningservices
+"""
+
+helps['machinelearningservices environment-specification-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version list --name "testContainer" --skiptoken \
+"skiptoken" --resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version show --name "testContainer" \
+--resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version create'] = """
+ type: command
+ short-summary: "Create an EnvironmentSpecificationVersion."
+ parameters:
+ - name: --docker-image
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-image docker-image-uri=XX docker-specification-type=XX operating-system-type=XX
+
+ docker-image-uri: Required. Image name of a custom base image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --docker-build
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-build dockerfile=XX context=XX docker-specification-type=XX operating-system-type=XX
+
+ dockerfile: Required. Docker command line instructions to assemble an image.
+ context: Path to a snapshot of the Docker Context. This property is only valid if Dockerfile is specified. \
+The path is relative to the asset path which must contain a single Blob URI value. Microsoft.MachineLearning.Management\
+FrontEnd.Contracts.Assets.Asset.Path
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --liveness-route
+ short-summary: "The route to check the liveness of the inference server container."
+ long-summary: |
+ Usage: --liveness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --readiness-route
+ short-summary: "The route to check the readiness of the inference server container."
+ long-summary: |
+ Usage: --readiness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --scoring-route
+ short-summary: "The port to send the scoring requests to, within the inference server container."
+ long-summary: |
+ Usage: --scoring-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ examples:
+ - name: CreateOrUpdate Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version create --name "testContainer" --properties \
+description="string" condaFile="string" docker={"dockerSpecificationType":"Build","dockerfile":"string"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version update'] = """
+ type: command
+ short-summary: "Update an EnvironmentSpecificationVersion."
+ parameters:
+ - name: --docker-image
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-image docker-image-uri=XX docker-specification-type=XX operating-system-type=XX
+
+ docker-image-uri: Required. Image name of a custom base image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --docker-build
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-build dockerfile=XX context=XX docker-specification-type=XX operating-system-type=XX
+
+ dockerfile: Required. Docker command line instructions to assemble an image.
+ context: Path to a snapshot of the Docker Context. This property is only valid if Dockerfile is specified. \
+The path is relative to the asset path which must contain a single Blob URI value. Microsoft.MachineLearning.Management\
+FrontEnd.Contracts.Assets.Asset.Path
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --liveness-route
+ short-summary: "The route to check the liveness of the inference server container."
+ long-summary: |
+ Usage: --liveness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --readiness-route
+ short-summary: "The route to check the readiness of the inference server container."
+ long-summary: |
+ Usage: --readiness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --scoring-route
+ short-summary: "The port to send the scoring requests to, within the inference server container."
+ long-summary: |
+ Usage: --scoring-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+"""
+
+helps['machinelearningservices environment-specification-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version delete --name "testContainer" \
+--resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job'] = """
+ type: group
+ short-summary: Manage job with machinelearningservices
+"""
+
+helps['machinelearningservices job list'] = """
+ type: command
+ short-summary: "Lists Jobs in the workspace."
+ examples:
+ - name: List Command Job.
+ text: |-
+ az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: List Sweep Job.
+ text: |-
+ az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job show'] = """
+ type: command
+ short-summary: "Gets a Job by name/id."
+ examples:
+ - name: Get Command Job.
+ text: |-
+ az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+ - name: Get Sweep Job.
+ text: |-
+ az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices job create'] = """
+ type: command
+ short-summary: "Creates and executes a Job."
+ examples:
+ - name: CreateOrUpdate Command Job.
+ text: |-
+ az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\\
+"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"\
+additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" --id \
+"testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+ - name: CreateOrUpdate Sweep Job.
+ text: |-
+ az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\\
+"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"\
+additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" --id \
+"testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job update'] = """
+ type: command
+    short-summary: "Updates and executes a Job."
+"""
+
+helps['machinelearningservices job delete'] = """
+ type: command
+ short-summary: "Deletes a Job."
+ examples:
+ - name: Delete Command Job.
+ text: |-
+ az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: Delete Sweep Job.
+ text: |-
+ az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job cancel'] = """
+ type: command
+ short-summary: "Cancels a Job."
+ examples:
+ - name: Cancel Command Job.
+ text: |-
+ az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: Cancel Sweep Job.
+ text: |-
+ az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices job is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices job is successfully deleted.
+ text: |-
+ az machinelearningservices job wait --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace" --deleted
+"""
+
+helps['machinelearningservices labeling-job'] = """
+ type: group
+ short-summary: Manage labeling job with machinelearningservices
+"""
+
+helps['machinelearningservices labeling-job list'] = """
+ type: command
+ short-summary: "Lists labeling jobs in the workspace."
+ examples:
+ - name: List Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job list --skiptoken "skiptoken" --count "10" --resource-group \
+"workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job show'] = """
+ type: command
+ short-summary: "Gets a labeling job by name/id."
+ examples:
+ - name: Get Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job create'] = """
+ type: command
+ short-summary: "Create a labeling job."
+ parameters:
+ - name: --dataset-configuration
+ short-summary: "Configuration of dataset used in the job."
+ long-summary: |
+ Usage: --dataset-configuration asset-name=XX incremental-dataset-refresh-enabled=XX dataset-version=XX
+
+ asset-name: Name of the data asset to perform labeling.
+ incremental-dataset-refresh-enabled: Indicates whether to enable incremental dataset refresh.
+ dataset-version: AML dataset version.
+ - name: --labeling-job-image-properties
+ short-summary: "Properties of a labeling job for image data"
+ long-summary: |
+ Usage: --labeling-job-image-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of image labeling job.
+ media-type: Required. Media type of the job.
+ - name: --labeling-job-text-properties
+ short-summary: "Properties of a labeling job for text data"
+ long-summary: |
+ Usage: --labeling-job-text-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of text labeling job.
+ media-type: Required. Media type of the job.
+ - name: --inferencing-compute-binding
+ short-summary: "AML compute binding used in inferencing."
+ long-summary: |
+ Usage: --inferencing-compute-binding compute-id=XX node-count=XX is-local=XX
+
+ compute-id: Resource ID of the compute resource.
+ node-count: Number of nodes.
+ is-local: Set to true for jobs running on local compute.
+ - name: --training-compute-binding
+ short-summary: "AML compute binding used in training."
+ long-summary: |
+ Usage: --training-compute-binding compute-id=XX node-count=XX is-local=XX
+
+ compute-id: Resource ID of the compute resource.
+ node-count: Number of nodes.
+ is-local: Set to true for jobs running on local compute.
+ examples:
+ - name: CreateOrUpdate Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job create --properties description="string" \
+datasetConfiguration={"assetName":"string","datasetVersion":"string","incrementalDatasetRefreshEnabled":true} \
+jobInstructions={"uri":"string"} jobType="Labeling" labelCategories={"additionalProp1":{"allowMultiSelect":true,"classe\
+s":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{\
+}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"},"additionalProp2":{"allowMultiSe\
+lect":true,"classes":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"stri\
+ng","subclasses":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"},"additionalPro\
+p3":{"allowMultiSelect":true,"classes":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"\
+displayName":"string","subclasses":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"strin\
+g"}} labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingComputeBinding":{"computeId":"\
+string","nodeCount":0},"mlAssistEnabled":true,"trainingComputeBinding":{"computeId":"string","nodeCount":0}} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --id "testLabelingJob" \
+--resource-group "workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job update'] = """
+ type: command
+ short-summary: "Update a labeling job."
+ parameters:
+ - name: --dataset-configuration
+ short-summary: "Configuration of dataset used in the job."
+ long-summary: |
+ Usage: --dataset-configuration asset-name=XX incremental-dataset-refresh-enabled=XX dataset-version=XX
+
+ asset-name: Name of the data asset to perform labeling.
+ incremental-dataset-refresh-enabled: Indicates whether to enable incremental dataset refresh.
+ dataset-version: AML dataset version.
+ - name: --labeling-job-image-properties
+ short-summary: "Properties of a labeling job for image data"
+ long-summary: |
+ Usage: --labeling-job-image-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of image labeling job.
+ media-type: Required. Media type of the job.
+ - name: --labeling-job-text-properties
+ short-summary: "Properties of a labeling job for text data"
+ long-summary: |
+ Usage: --labeling-job-text-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of text labeling job.
+ media-type: Required. Media type of the job.
+ - name: --inferencing-compute-binding
+ short-summary: "AML compute binding used in inferencing."
+ long-summary: |
+ Usage: --inferencing-compute-binding compute-id=XX node-count=XX is-local=XX
+
+ compute-id: Resource ID of the compute resource.
+ node-count: Number of nodes.
+ is-local: Set to true for jobs running on local compute.
+ - name: --training-compute-binding
+ short-summary: "AML compute binding used in training."
+ long-summary: |
+ Usage: --training-compute-binding compute-id=XX node-count=XX is-local=XX
+
+ compute-id: Resource ID of the compute resource.
+ node-count: Number of nodes.
+ is-local: Set to true for jobs running on local compute.
+"""
+
+helps['machinelearningservices labeling-job delete'] = """
+ type: command
+ short-summary: "Delete a labeling job."
+ examples:
+ - name: Delete Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job export-label'] = """
+ type: command
+ short-summary: "Export labels from a labeling job."
+ parameters:
+ - name: --coco-export-summary
+ long-summary: |
+ Usage: --coco-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ - name: --csv-export-summary
+ long-summary: |
+ Usage: --csv-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ - name: --dataset-export-summary
+ long-summary: |
+ Usage: --dataset-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ examples:
+ - name: ExportLabels Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group \
+"workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job pause'] = """
+ type: command
+ short-summary: "Pause a labeling job."
+ examples:
+ - name: Pause Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job resume'] = """
+ type: command
+ short-summary: "Resume a labeling job."
+ examples:
+ - name: Resume Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices labeling-job is \
+met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices labeling-job is successfully \
+created.
+ text: |-
+ az machinelearningservices labeling-job wait --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices labeling-job is successfully \
+updated.
+ text: |-
+ az machinelearningservices labeling-job wait --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace" --updated
+"""
+
+helps['machinelearningservices model-container'] = """
+ type: group
+ short-summary: Manage model container with machinelearningservices
+"""
+
+helps['machinelearningservices model-container list'] = """
+ type: command
+ short-summary: "List model containers."
+ examples:
+ - name: List Model Container.
+ text: |-
+ az machinelearningservices model-container list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices model-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Model Container.
+ text: |-
+ az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Model Container.
+ text: |-
+ az machinelearningservices model-container create --name "testContainer" --properties \
+description="Model container description" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices model-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Model Container.
+ text: |-
+ az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version'] = """
+ type: group
+ short-summary: Manage model version with machinelearningservices
+"""
+
+helps['machinelearningservices model-version list'] = """
+ type: command
+ short-summary: "List model versions."
+ examples:
+ - name: List Model Version.
+ text: |-
+ az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" \
+--version "999" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Model Version.
+ text: |-
+ az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" \
+--version "999" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version create'] = """
+ type: command
+ short-summary: "Create version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+ examples:
+ - name: CreateOrUpdate Model Version.
+ text: |-
+ az machinelearningservices model-version create --name "testContainer" --properties description="Model \
+version description" assetPath={"path":"LocalUpload/12345/some/path","isDirectory":true} \
+datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg123/providers/Microsoft.MachineLe\
+arningServices/workspaces/workspace123/datastores/datastore123" properties={"prop1":"value1","prop2":"value2"} \
+stage="Production" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "999" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version update'] = """
+ type: command
+ short-summary: "Update version."
+ parameters:
+ - name: --asset-path
+ short-summary: "DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path \
+instead"
+ long-summary: |
+ Usage: --asset-path path=XX is-directory=XX
+
+ path: Required. The path of file/directory.
+ is-directory: Whether the path defines a directory or a single file.
+"""
+
+helps['machinelearningservices model-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Model Version.
+ text: |-
+ az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" \
+--version "999" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment'] = """
+ type: group
+ short-summary: Manage online deployment with machinelearningservices
+"""
+
+helps['machinelearningservices online-deployment list'] = """
+ type: command
+ short-summary: "List Inference Endpoint Deployments."
+ examples:
+ - name: List Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment show'] = """
+ type: command
+    short-summary: "Get Inference Endpoint Deployment."
+ examples:
+ - name: Get Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment create'] = """
+ type: command
+ short-summary: "Create Inference Endpoint Deployment."
+ parameters:
+ - name: --scale-settings
+ long-summary: |
+ Usage: --scale-settings minimum=XX maximum=XX instance-count=XX scale-type=XX
+
+ - name: --id-asset-reference
+ long-summary: |
+ Usage: --id-asset-reference asset-id=XX reference-type=XX
+
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --data-path-asset-reference
+ long-summary: |
+ Usage: --data-path-asset-reference path=XX datastore-id=XX reference-type=XX
+
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --output-path-asset-reference
+ long-summary: |
+ Usage: --output-path-asset-reference path=XX job-id=XX reference-type=XX
+
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --code-configuration
+ short-summary: "Code configuration for the endpoint deployment."
+ long-summary: |
+ Usage: --code-configuration code-artifact-id=XX command=XX
+
+ code-artifact-id: The ID of the code asset.
+ command: Required. The command to execute on startup of the job. eg. ["python", "train.py"]
+ examples:
+ - name: CreateOrUpdate Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment create --user-assigned-identities \
+"{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"te\
+nantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\
+\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"s\
+tring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" --kind "string" --location "string" --properties \
+description="string" codeConfiguration={"codeArtifactId":"string","command":"string"} deploymentConfiguration={"appInsi\
+ghtsEnabled":true,"computeType":"Managed","maxConcurrentRequestsPerInstance":0,"maxQueueWaitMs":0,"scoringTimeoutMs":0}\
+ environmentId="string" modelReference={"assetId":"string","referenceType":"Id"} properties={"additionalProp1":"string"\
+,"additionalProp2":"string","additionalProp3":"string"} scaleSettings={"instanceCount":0,"maximum":0,"minimum":0,"scale\
+Type":"Automatic"} --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment update'] = """
+ type: command
+ short-summary: "Update Online Deployment."
+ parameters:
+ - name: --scale-settings
+ long-summary: |
+ Usage: --scale-settings minimum=XX maximum=XX instance-count=XX scale-type=XX
+           scale-type: Required. Specifies the type of scale settings.
+ examples:
+ - name: Update Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment update --user-assigned-identities \
+"{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"te\
+nantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\
+\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"s\
+tring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" --kind "string" --deployment-configuration \
+"{\\"appInsightsEnabled\\":true,\\"computeType\\":\\"Managed\\",\\"maxConcurrentRequestsPerInstance\\":0,\\"maxQueueWai\
+tMs\\":0,\\"scoringTimeoutMs\\":0}" --scale-settings instance-count=0 maximum=0 minimum=0 scale-type="Automatic" \
+--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" \
+--endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment delete'] = """
+ type: command
+ short-summary: "Delete Inference Endpoint Deployment."
+ examples:
+ - name: Delete Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment get-log'] = """
+ type: command
+    short-summary: "Get the logs of the specified online deployment."
+ examples:
+ - name: GetLogs Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 \
+--deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices online-deployment wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices online-deployment \
+is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully created.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully updated.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully deleted.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --deleted
+"""
+
+helps['machinelearningservices online-endpoint'] = """
+ type: group
+ short-summary: Manage online endpoint with machinelearningservices
+"""
+
+helps['machinelearningservices online-endpoint list'] = """
+ type: command
+ short-summary: "List Online Endpoints."
+ examples:
+ - name: List Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices online-endpoint show'] = """
+ type: command
+ short-summary: "Get Online Endpoint."
+ examples:
+ - name: Get Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint create'] = """
+ type: command
+ short-summary: "Create Online Endpoint."
+ parameters:
+ - name: --aks-compute-configuration
+ long-summary: |
+ Usage: --aks-compute-configuration namespace=XX compute-name=XX compute-type=XX
+
+ - name: --managed-compute-configuration
+ long-summary: |
+ Usage: --managed-compute-configuration compute-type=XX
+
+ - name: --azure-ml-compute-configuration
+ long-summary: |
+ Usage: --azure-ml-compute-configuration compute-type=XX
+
+ examples:
+ - name: CreateOrUpdate Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint create --user-assigned-identities \
+"{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"te\
+nantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\
+\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"s\
+tring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" --kind "string" --location "string" --properties \
+description="string" authMode="AMLToken" computeConfiguration={"computeType":"Managed"} properties={"additionalProp1":"\
+string","additionalProp2":"string","additionalProp3":"string"} trafficRules={"additionalProp1":0,"additionalProp2":0,"a\
+dditionalProp3":0} --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint update'] = """
+ type: command
+ short-summary: "Update Online Endpoint."
+ examples:
+ - name: Update Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint update --user-assigned-identities \
+"{\\"additionalProp1\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"te\
+nantId\\":\\"string\\"},\\"additionalProp2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\
+\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"s\
+tring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}" --kind "string" --traffic-rules \
+additionalProp1=0 additionalProp2=0 additionalProp3=0 --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint delete'] = """
+ type: command
+ short-summary: "Delete Online Endpoint."
+ examples:
+ - name: Delete Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint get-token'] = """
+ type: command
+ short-summary: "Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication."
+ examples:
+ - name: GetToken Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint list-key'] = """
+ type: command
+ short-summary: "List EndpointAuthKeys for an Endpoint using Key-based authentication."
+ examples:
+ - name: ListKeys Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint regenerate-key'] = """
+ type: command
+ short-summary: "Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication."
+ examples:
+ - name: RegenerateKeys Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" \
+--endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices online-endpoint \
+is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully created.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully updated.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully deleted.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --deleted
+"""
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
new file mode 100644
index 00000000000..1cc21da2b78
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
@@ -0,0 +1,1456 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_three_state_flag,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import (
+ get_default_location_from_resource_group,
+ validate_file_or_dict
+)
+from azext_machinelearningservices.action import (
+ AddSku,
+ AddSharedPrivateLinkResources,
+ AddKeyVaultProperties,
+ AddValue,
+ AddAdministratorAccount,
+ AddMachinelearningcomputeScaleSettings,
+ AddPrivateLinkServiceConnectionState,
+ AddLinkedservicesProperties,
+ AddCodecontainersProperties,
+ AddAssetPath,
+ AddCodeversionsProperties,
+ AddComponentcontainersProperties,
+ AddCodeConfiguration,
+ AddComponentversionsProperties,
+ AddDatacontainersProperties,
+ AddLinkedInfo,
+ AddDatastoresProperties,
+ AddGlusterFs,
+ AddDataversionsProperties,
+ AddEnvironmentcontainersProperties,
+ AddDockerImage,
+ AddDockerBuild,
+ AddEnvironmentspecificationversionsProperties,
+ AddLivenessRoute,
+ AddLabelingjobsProperties,
+ AddDatasetConfiguration,
+ AddLabelingJobImageProperties,
+ AddLabelingJobTextProperties,
+ AddInferencingComputeBinding,
+ AddCocoExportSummary,
+ AddCsvExportSummary,
+ AddDatasetExportSummary,
+ AddModelcontainersProperties,
+ AddModelversionsProperties,
+ AddOnlinedeploymentsScaleSettings,
+ AddOnlinedeploymentsProperties,
+ AddIdAssetReference,
+ AddDataPathAssetReference,
+ AddOutputPathAssetReference,
+ AddEnvironmentVariables,
+ AddProperties,
+ AddMachinelearningservicesOnlineEndpointCreateTrafficRules,
+ AddAksComputeConfiguration,
+ AddManagedComputeConfiguration,
+ AddAzureMlComputeConfiguration,
+ AddMachinelearningservicesOnlineEndpointUpdateTrafficRules
+)
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('machinelearningservices workspace list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices workspace show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('description', type=str, help='The description of this workspace.')
+        c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+ c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+ 'changed once the workspace has been created')
+ c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+ 'This cannot be changed once the workspace has been created')
+ c.argument('discovery_url', type=str, help='Url for the discovery service to identify regional endpoints for '
+ 'machine learning experimentation services')
+ c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+ 'workspace and reduce diagnostic data collected by the service')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+ 'whether to allow public access when behind VNet.')
+ c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='+', help='The list of '
+ 'shared private link resources in this workspace.')
+ c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not the '
+ 'encryption is enabled for the workspace.', arg_group='Encryption')
+ c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='Customer Key vault '
+ 'properties.', arg_group='Encryption')
+
+ with self.argument_context('machinelearningservices workspace update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+
+ with self.argument_context('machinelearningservices workspace delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace resync-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace-feature list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices usage list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices virtual-machine-size list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota update') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
+ c.argument('value', action=AddValue, nargs='+', help='The list for update quota.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('ak_s_compute_location', type=str, help='Location for the underlying compute')
+ c.argument('ak_s_description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('ak_s_resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('ak_s_properties', type=validate_file_or_dict, help='AKS properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aml-compute create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('aml_compute_properties', type=validate_file_or_dict, help='AML Compute properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute compute-instance create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('compute_instance_properties', type=validate_file_or_dict, help='Compute Instance properties '
+ 'Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-factory create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-lake-analytics create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('data_lake_store_account_name', type=str, help='DataLake Store Account Name')
+
+ with self.argument_context('machinelearningservices machine-learning-compute databricks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('databricks_access_token', type=str, help='Databricks access token')
+
+ with self.argument_context('machinelearningservices machine-learning-compute hd-insight create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('ssh_port', type=int, help='Port open for ssh connections on the master node of the cluster.')
+ c.argument('address', type=str, help='Public IP address of the master node of the cluster.')
+ c.argument('administrator_account', action=AddAdministratorAccount, nargs='+', help='Admin credentials for '
+ 'master node of the cluster')
+
+ with self.argument_context('machinelearningservices machine-learning-compute virtual-machine create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('virtual_machine_size', type=str, help='Virtual Machine size')
+ c.argument('ssh_port', type=int, help='Port open for ssh connections.')
+ c.argument('address', type=str, help='Public IP address of the virtual machine.')
+ c.argument('administrator_account', action=AddAdministratorAccount, nargs='+', help='Admin credentials for '
+ 'virtual machine')
+
+ with self.argument_context('machinelearningservices machine-learning-compute update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('scale_settings', action=AddMachinelearningcomputeScaleSettings, nargs='+', help='Desired scale '
+ 'settings for the amlCompute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the '
+ 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-node') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute restart') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute start') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute stop') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection put') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+
+ with self.argument_context('machinelearningservices private-link-resource list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices linked-service list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices linked-service show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('link_name', type=str, help='Friendly name of the linked workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices linked-service create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('link_name', type=str, help='Friendly name of the linked workspace')
+ c.argument('name', type=str, help='Friendly name of the linked service')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('properties', action=AddLinkedservicesProperties, nargs='+', help='LinkedService specific '
+ 'properties.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices linked-service delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('link_name', type=str, help='Friendly name of the linked workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-service list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('model_id', type=str, help='The Model Id.')
+ c.argument('model_name', type=str, help='The Model name.')
+ c.argument('tag', type=str, help='The object tag.')
+ c.argument('tags', tags_type)
+ c.argument('properties', type=str, help='A set of properties with which to filter the returned services. It is '
+ 'a comma separated string of properties key and/or properties key=value Example: '
+ 'propKey1,propKey2,propKey3=value3 .')
+ c.argument('run_id', type=str, help='runId for model associated with service.')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+ c.argument('orderby', arg_type=get_enum_type(['CreatedAtDesc', 'CreatedAtAsc', 'UpdatedAtDesc',
+ 'UpdatedAtAsc']), help='The option to order the response.')
+
+ with self.argument_context('machinelearningservices machine-learning-service show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+
+ with self.argument_context('machinelearningservices machine-learning-service create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.')
+ c.argument('properties', type=validate_file_or_dict, help='The payload that is used to create or update the '
+ 'Service. Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-service update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('properties', type=validate_file_or_dict, help='The payload that is used to create or update the '
+ 'Service. Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-service delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-service wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+
+ with self.argument_context('machinelearningservices notebook list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices notebook prepare') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection')
+ c.argument('name', type=str, help='Friendly name of the workspace connection')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('auth_type', type=str, help='Authorization type of the workspace connection.')
+ c.argument('value', type=str, help='Value details of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices code-container list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices code-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices code-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('properties', action=AddCodecontainersProperties, nargs='+', help='Dictionary of Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='')
+
+ with self.argument_context('machinelearningservices code-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('properties', action=AddCodecontainersProperties, nargs='+', help='Dictionary of Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices code-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices code-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices code-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices code-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddCodeversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+
+ with self.argument_context('machinelearningservices code-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddCodeversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices code-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices component-container list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices component-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices component-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddComponentcontainersProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+
+ with self.argument_context('machinelearningservices component-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddComponentcontainersProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices component-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices component-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices component-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices component-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('environment_id', type=str, help='Environment configuration of the component.')
+ c.argument('code_configuration', action=AddCodeConfiguration, nargs='+', help='Code configuration of the job. '
+ 'Includes CodeArtifactId and Command.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddComponentversionsProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('display_name', type=str, help='DisplayName of the component on the UI. Defaults to same as name.',
+ arg_group='Component')
+ c.argument('is_deterministic', arg_type=get_three_state_flag(), help='Whether or not its deterministic. '
+ 'Defaults to true.', arg_group='Component')
+ c.argument('inputs', type=validate_file_or_dict, help='Defines input ports of the component. The string key is '
+ 'the name of input, which should be a valid Python variable name. Expected value: '
+ 'json-string/@json-file.', arg_group='Component')
+ c.argument('outputs', type=validate_file_or_dict, help='Defines output ports of the component. The string key '
+ 'is the name of Output, which should be a valid Python variable name. Expected value: '
+ 'json-string/@json-file.', arg_group='Component')
+
+ with self.argument_context('machinelearningservices component-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('environment_id', type=str, help='Environment configuration of the component.')
+ c.argument('code_configuration', action=AddCodeConfiguration, nargs='+', help='Code configuration of the job. '
+ 'Includes CodeArtifactId and Command.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddComponentversionsProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('display_name', type=str, help='DisplayName of the component on the UI. Defaults to same as name.',
+ arg_group='Component')
+ c.argument('is_deterministic', arg_type=get_three_state_flag(), help='Whether or not its deterministic. '
+ 'Defaults to true.', arg_group='Component')
+ c.argument('inputs', type=validate_file_or_dict, help='Defines input ports of the component. The string key is '
+ 'the name of input, which should be a valid Python variable name. Expected value: '
+ 'json-string/@json-file.', arg_group='Component')
+ c.argument('outputs', type=validate_file_or_dict, help='Defines output ports of the component. The string key '
+ 'is the name of Output, which should be a valid Python variable name. Expected value: '
+ 'json-string/@json-file.', arg_group='Component')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices component-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices data-container list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices data-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices data-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('properties', action=AddDatacontainersProperties, nargs='+', help='Dictionary of Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='')
+
+ with self.argument_context('machinelearningservices data-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('properties', action=AddDatacontainersProperties, nargs='+', help='Dictionary of Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices data-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices datastore list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Maximum number of results to return.')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Filter down to the workspace default '
+ 'datastore.')
+ c.argument('names', nargs='+', help='Names of datastores to return.')
+ c.argument('search_text', type=str, help='Text to search for in the datastore names.')
+ c.argument('order_by', type=str, help='Order by property (createdtime | modifiedtime | name).')
+ c.argument('order_by_asc', arg_type=get_three_state_flag(), help='Order by property in ascending order.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices datastore show') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices datastore create') as c:
+ c.argument('name', type=str, help='Datastore name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Whether this datastore is the default for the '
+ 'workspace.')
+ c.argument('linked_info', action=AddLinkedInfo, nargs='+', help='Information about the datastore origin, if '
+ 'linked.')
+ c.argument('properties', action=AddDatastoresProperties, nargs='+', help='Dictionary of Expect value: '
+ 'KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('datastore_contents_type', arg_type=get_enum_type(['AzureBlob', 'AzureDataLake',
+ 'AzureDataLakeGen2', 'AzureFile', 'AzureMySql',
+ 'AzurePostgreSql', 'AzureSqlDatabase',
+ 'GlusterFs']), help='Storage type backing the '
+ 'datastore.', arg_group='Contents')
+ c.argument('azure_data_lake', type=validate_file_or_dict, help='Azure Data Lake (Gen1/2) storage information. '
+ 'Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_my_sql', type=validate_file_or_dict, help='Azure Database for MySQL information. Expected '
+ 'value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_postgre_sql', type=validate_file_or_dict, help='Azure Database for PostgreSQL information. '
+ 'Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_sql_database', type=validate_file_or_dict, help='Azure SQL Database information. Expected '
+ 'value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_storage', type=validate_file_or_dict, help='Azure storage account (blobs, files) '
+ 'information. Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('gluster_fs', action=AddGlusterFs, nargs='+', help='GlusterFS volume information.',
+ arg_group='Contents')
+
+ # ----- datastore: update / delete / list-secret -----
+ # Generated argument registrations. `id_part` values let knack split a full
+ # resource ID passed via --ids into workspace name and datastore name.
+ with self.argument_context('machinelearningservices datastore update') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Whether this datastore is the default for the '
+ 'workspace.')
+ c.argument('linked_info', action=AddLinkedInfo, nargs='+', help='Information about the datastore origin, if '
+ 'linked.')
+ # NOTE(review): "Dictionary of Expect value" help wording is generator output;
+ # it is runtime --help text, so it is left byte-identical here.
+ c.argument('properties', action=AddDatastoresProperties, nargs='+', help='Dictionary of Expect value: '
+ 'KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ # 'Contents' arg_group collects the mutually-alternative storage backends.
+ c.argument('datastore_contents_type', arg_type=get_enum_type(['AzureBlob', 'AzureDataLake',
+ 'AzureDataLakeGen2', 'AzureFile', 'AzureMySql',
+ 'AzurePostgreSql', 'AzureSqlDatabase',
+ 'GlusterFs']), help='Storage type backing the '
+ 'datastore.', arg_group='Contents')
+ c.argument('azure_data_lake', type=validate_file_or_dict, help='Azure Data Lake (Gen1/2) storage information. '
+ 'Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_my_sql', type=validate_file_or_dict, help='Azure Database for MySQL information. Expected '
+ 'value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_postgre_sql', type=validate_file_or_dict, help='Azure Database for PostgreSQL information. '
+ 'Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_sql_database', type=validate_file_or_dict, help='Azure SQL Database information. Expected '
+ 'value: json-string/@json-file.', arg_group='Contents')
+ c.argument('azure_storage', type=validate_file_or_dict, help='Azure storage account (blobs, files) '
+ 'information. Expected value: json-string/@json-file.', arg_group='Contents')
+ c.argument('gluster_fs', action=AddGlusterFs, nargs='+', help='GlusterFS volume information.',
+ arg_group='Contents')
+ # 'body' is assembled internally for the PUT-style update; hide it from users.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices datastore delete') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # list-secret takes no id_part: it is not addressable via --ids.
+ with self.argument_context('machinelearningservices datastore list-secret') as c:
+ c.argument('name', type=str, help='Datastore name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ # ----- data-version: list / show / create / update / delete -----
+ with self.argument_context('machinelearningservices data-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices data-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices data-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('dataset_type', arg_type=get_enum_type(['Simple', 'Dataflow']), help='The Format of dataset.')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ # The deprecation text below comes from the service Swagger; keep verbatim.
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddDataversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+
+ # update mirrors create but is --ids addressable (id_part set) and hides the
+ # internally-built request 'body'.
+ with self.argument_context('machinelearningservices data-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('dataset_type', arg_type=get_enum_type(['Simple', 'Dataflow']), help='The Format of dataset.')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddDataversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices data-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- environment-container: list / show / create / update / delete -----
+ with self.argument_context('machinelearningservices environment-container list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices environment-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices environment-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('properties', action=AddEnvironmentcontainersProperties, nargs='+', help='Dictionary of '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # NOTE(review): empty help='' is generator output (Swagger had no description);
+ # consider regenerating with a description rather than hand-editing.
+ c.argument('description', type=str, help='')
+
+ with self.argument_context('machinelearningservices environment-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('properties', action=AddEnvironmentcontainersProperties, nargs='+', help='Dictionary of '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='')
+ # Hide the internally-assembled request body from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices environment-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- environment-specification-version: list / show / create / update / delete -----
+ with self.argument_context('machinelearningservices environment-specification-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices environment-specification-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices environment-specification-version create') as c:
+ c.argument('name', type=str, help='Name of EnvironmentSpecificationVersion.')
+ c.argument('version', type=str, help='Version of EnvironmentSpecificationVersion.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ # 'Docker' group: image-based vs build-context-based environment definition.
+ c.argument('docker_image', action=AddDockerImage, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('docker_build', action=AddDockerBuild, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('conda_file', type=str, help='Standard configuration file used by conda that lets you install any '
+ 'kind of package, including Python, R, and C/C++ packages ')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddEnvironmentspecificationversionsProperties, nargs='+', help='The asset '
+ 'property dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ # All three routes reuse the AddLivenessRoute complex-argument action; only
+ # the target property differs.
+ c.argument('liveness_route', action=AddLivenessRoute, nargs='+', help='The route to check the liveness of the '
+ 'inference server container.', arg_group='Inference Container Properties')
+ c.argument('readiness_route', action=AddLivenessRoute, nargs='+', help='The route to check the readiness of '
+ 'the inference server container.', arg_group='Inference Container Properties')
+ c.argument('scoring_route', action=AddLivenessRoute, nargs='+', help='The port to send the scoring requests '
+ 'to, within the inference server container.', arg_group='Inference Container Properties')
+
+ # update mirrors create, adds --ids addressing and hides the request body.
+ with self.argument_context('machinelearningservices environment-specification-version update') as c:
+ c.argument('name', type=str, help='Name of EnvironmentSpecificationVersion.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version of EnvironmentSpecificationVersion.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('docker_image', action=AddDockerImage, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('docker_build', action=AddDockerBuild, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('conda_file', type=str, help='Standard configuration file used by conda that lets you install any '
+ 'kind of package, including Python, R, and C/C++ packages ')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddEnvironmentspecificationversionsProperties, nargs='+', help='The asset '
+ 'property dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('liveness_route', action=AddLivenessRoute, nargs='+', help='The route to check the liveness of the '
+ 'inference server container.', arg_group='Inference Container Properties')
+ c.argument('readiness_route', action=AddLivenessRoute, nargs='+', help='The route to check the readiness of '
+ 'the inference server container.', arg_group='Inference Container Properties')
+ c.argument('scoring_route', action=AddLivenessRoute, nargs='+', help='The port to send the scoring requests '
+ 'to, within the inference server container.', arg_group='Inference Container Properties')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices environment-specification-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- job: list / show / create / update / delete / cancel / wait -----
+ with self.argument_context('machinelearningservices job list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('job_type', type=str, help='Type of job to be returned.')
+ # NOTE(review): both 'tags' and singular 'tag' are registered; 'tag' is a
+ # server-side filter key while 'tags' is the standard tags argument — confirm
+ # the generated options do not collide in --help output.
+ c.argument('tags', tags_type)
+ c.argument('tag', type=str, help='Jobs returned will have this tag key.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ # 'id_' avoids shadowing the Python builtin; exposed to users as --id.
+ with self.argument_context('machinelearningservices job show') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job create') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ # The full job definition is taken as raw JSON rather than flattened flags.
+ c.argument('properties', type=validate_file_or_dict, help='Job base definition Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices job update') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('properties', type=validate_file_or_dict, help='Job base definition Expected value: '
+ 'json-string/@json-file.')
+ # Hide the SDK-level 'id' and the assembled request 'body'.
+ c.ignore('id', 'body')
+
+ with self.argument_context('machinelearningservices job delete') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job cancel') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job wait') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- labeling-job: list / show / create / update / delete / export-label /
+ # pause / resume / wait -----
+ with self.argument_context('machinelearningservices labeling-job list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Number of labeling jobs to return.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices labeling-job show') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('include_job_instructions', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'whether to include JobInstructions in response.')
+ c.argument('include_label_categories', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'Whether to include LabelCategories in response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job create') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddLabelingjobsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('label_categories', type=validate_file_or_dict, help='Label categories of the job. Expected value: '
+ 'json-string/@json-file.')
+ c.argument('dataset_configuration', action=AddDatasetConfiguration, nargs='+', help='Configuration of dataset '
+ 'used in the job.')
+ # Media-type-specific property bags (image vs text) share one arg_group.
+ c.argument('labeling_job_image_properties', action=AddLabelingJobImageProperties, nargs='+', help='Properties '
+ 'of a labeling job for image data', arg_group='LabelingJobMediaProperties')
+ c.argument('labeling_job_text_properties', action=AddLabelingJobTextProperties, nargs='+', help='Properties of '
+ 'a labeling job for text data', arg_group='LabelingJobMediaProperties')
+ # Both compute bindings reuse the AddInferencingComputeBinding action.
+ c.argument('inferencing_compute_binding', action=AddInferencingComputeBinding, nargs='+', help='AML compute '
+ 'binding used in inferencing.', arg_group='Ml Assist Configuration')
+ c.argument('training_compute_binding', action=AddInferencingComputeBinding, nargs='+', help='AML compute '
+ 'binding used in training.', arg_group='Ml Assist Configuration')
+ c.argument('ml_assist_enabled', arg_type=get_three_state_flag(), help='Indicates whether MLAssist feature is '
+ 'enabled.', arg_group='Ml Assist Configuration')
+ c.argument('uri', type=str, help='The link to a page with detailed labeling instructions for labelers.',
+ arg_group='Job Instructions')
+
+ # update mirrors create, adds --ids addressing and hides SDK-internal params.
+ with self.argument_context('machinelearningservices labeling-job update') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddLabelingjobsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('label_categories', type=validate_file_or_dict, help='Label categories of the job. Expected value: '
+ 'json-string/@json-file.')
+ c.argument('dataset_configuration', action=AddDatasetConfiguration, nargs='+', help='Configuration of dataset '
+ 'used in the job.')
+ c.argument('labeling_job_image_properties', action=AddLabelingJobImageProperties, nargs='+', help='Properties '
+ 'of a labeling job for image data', arg_group='LabelingJobMediaProperties')
+ c.argument('labeling_job_text_properties', action=AddLabelingJobTextProperties, nargs='+', help='Properties of '
+ 'a labeling job for text data', arg_group='LabelingJobMediaProperties')
+ c.argument('inferencing_compute_binding', action=AddInferencingComputeBinding, nargs='+', help='AML compute '
+ 'binding used in inferencing.', arg_group='Ml Assist Configuration')
+ c.argument('training_compute_binding', action=AddInferencingComputeBinding, nargs='+', help='AML compute '
+ 'binding used in training.', arg_group='Ml Assist Configuration')
+ c.argument('ml_assist_enabled', arg_type=get_three_state_flag(), help='Indicates whether MLAssist feature is '
+ 'enabled.', arg_group='Ml Assist Configuration')
+ c.argument('uri', type=str, help='The link to a page with detailed labeling instructions for labelers.',
+ arg_group='Job Instructions')
+ c.ignore('id', 'body')
+
+ with self.argument_context('machinelearningservices labeling-job delete') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # export-label: one of three mutually-alternative export-format summaries.
+ with self.argument_context('machinelearningservices labeling-job export-label') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('coco_export_summary', action=AddCocoExportSummary, nargs='+', help=' Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...', arg_group='Body')
+ c.argument('csv_export_summary', action=AddCsvExportSummary, nargs='+', help=' Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...', arg_group='Body')
+ c.argument('dataset_export_summary', action=AddDatasetExportSummary, nargs='+', help=' Expect value: '
+ 'KEY1=VALUE1 KEY2=VALUE2 ...', arg_group='Body')
+
+ with self.argument_context('machinelearningservices labeling-job pause') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job resume') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # wait mirrors show's arguments so the poller can re-issue the GET.
+ with self.argument_context('machinelearningservices labeling-job wait') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('include_job_instructions', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'whether to include JobInstructions in response.')
+ c.argument('include_label_categories', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'Whether to include LabelCategories in response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- model-container: list / show / create / update / delete -----
+ with self.argument_context('machinelearningservices model-container list') as c:
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Maximum number of results to return.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices model-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddModelcontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+
+ # update mirrors create; --ids addressable, request body assembled internally.
+ with self.argument_context('machinelearningservices model-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddModelcontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices model-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- model-version: list / show / create / update / delete -----
+ with self.argument_context('machinelearningservices model-version list') as c:
+ c.argument('name', type=str, help='Model name.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('version', type=str, help='Model version.')
+ c.argument('description', type=str, help='Model description.')
+ c.argument('offset', type=int, help='Number of initial results to skip.')
+ c.argument('tags', tags_type)
+ # For list, 'properties' is a server-side filter string, unlike the
+ # key=value complex argument used by create/update below.
+ c.argument('properties', type=str, help='Comma-separated list of property names (and optionally values). '
+ 'Example: prop1,prop2=value2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices model-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('stage', type=str, help='Model asset stage.')
+ c.argument('flavors', type=validate_file_or_dict, help='Dictionary mapping model flavors to their properties. '
+ 'Expected value: json-string/@json-file.')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ # Deprecation text below is carried verbatim from the service Swagger.
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddModelversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+
+ # update mirrors create; --ids addressable, request body assembled internally.
+ with self.argument_context('machinelearningservices model-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('stage', type=str, help='Model asset stage.')
+ c.argument('flavors', type=validate_file_or_dict, help='Dictionary mapping model flavors to their properties. '
+ 'Expected value: json-string/@json-file.')
+ c.argument('datastore_id', type=str, help='The asset datastoreId')
+ c.argument('asset_path', action=AddAssetPath, nargs='+', help='DEPRECATED - use Microsoft.MachineLearning.Manag'
+ 'ementFrontEnd.Contracts.Assets.Asset.Path instead')
+ c.argument('path', type=str, help='The path of the file/directory.')
+ c.argument('generated_by', arg_type=get_enum_type(['User', 'System']), help='If the name version are system '
+ 'generated (anonymous registration) or user generated.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('tags', tags_type)
+ c.argument('properties', action=AddModelversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices model-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # ----- online-deployment: list / show / create -----
+ # (The matching `update` context continues past this hunk.)
+ with self.argument_context('machinelearningservices online-deployment list') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Top of list.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-deployment show') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-deployment create') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ # Location defaults to the resource group's location when omitted.
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ # NOTE(review): several empty help='' strings below are generator output
+ # (no Swagger description); regenerate with descriptions rather than editing.
+ c.argument('kind', type=str, help='')
+ c.argument('scale_settings', action=AddOnlinedeploymentsScaleSettings, nargs='+', help='')
+ c.argument('deployment_configuration', type=validate_file_or_dict, help=' Expected value: '
+ 'json-string/@json-file.')
+ c.argument('description', type=str, help='Description of the endpoint deployment.')
+ c.argument('properties', action=AddOnlinedeploymentsProperties, nargs='+', help='Property dictionary. '
+ 'Properties can be added, but not removed or altered. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ # 'ModelReference' group: three mutually-alternative ways to reference a model.
+ c.argument('id_asset_reference', action=AddIdAssetReference, nargs='+', help='', arg_group='ModelReference')
+ c.argument('data_path_asset_reference', action=AddDataPathAssetReference, nargs='+', help='',
+ arg_group='ModelReference')
+ c.argument('output_path_asset_reference', action=AddOutputPathAssetReference, nargs='+', help='',
+ arg_group='ModelReference')
+ c.argument('code_configuration', action=AddCodeConfiguration, nargs='+', help='Code configuration for the '
+ 'endpoint deployment.')
+ c.argument('environment_id', type=str, help='Environment specification for the endpoint deployment.')
+ c.argument('environment_variables', action=AddEnvironmentVariables, nargs='+', help='Environment variables '
+ 'configuration for the deployment. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ # 'type_' avoids shadowing the builtin; surfaced to users as --type.
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ResourceId of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-deployment update') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='')
+ c.argument('scale_settings', action=AddOnlinedeploymentsScaleSettings, nargs='+', help='')
+ c.argument('deployment_configuration', type=validate_file_or_dict, help=' Expected value: '
+ 'json-string/@json-file.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ResourceId of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-deployment delete') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-deployment get-log') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='The name and identifier for the endpoint.',
+ id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('container_type', arg_type=get_enum_type(['StorageInitializer', 'InferenceServer']), help='The type '
+ 'of container to retrieve logs from.')
+ c.argument('tail', type=int, help='The maximum number of lines to tail.')
+
+ with self.argument_context('machinelearningservices online-deployment wait') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint list') as c:
+ c.argument('name', type=str, help='Name of the endpoint.')
+ c.argument('count', type=int, help='Number of endpoints to be retrieved in a page of results.')
+ c.argument('compute_type', arg_type=get_enum_type(['Managed', 'AKS', 'AzureMLCompute']),
+ help='EndpointComputeType to be filtered by.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+ c.argument('tags', tags_type)
+ c.argument('properties', type=str, help='A set of properties with which to filter the returned models. It is a '
+ 'comma separated string of properties key and/or properties key=value Example: '
+ 'propKey1,propKey2,propKey3=value3 .')
+ c.argument('order_by', arg_type=get_enum_type(['CreatedAtDesc', 'CreatedAtAsc', 'UpdatedAtDesc',
+ 'UpdatedAtAsc']), help='The option to order the response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-endpoint show') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint create') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='')
+ c.argument('description', type=str, help='Description of the inference endpoint.')
+ c.argument('properties', action=AddProperties, nargs='+', help='Property dictionary. Properties can be added, '
+ 'but not removed or altered. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('traffic_rules', action=AddMachinelearningservicesOnlineEndpointCreateTrafficRules, nargs='+',
+ help='Traffic rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...')
+ c.argument('aks_compute_configuration', action=AddAksComputeConfiguration, nargs='+', help='',
+ arg_group='ComputeConfiguration')
+ c.argument('managed_compute_configuration', action=AddManagedComputeConfiguration, nargs='+', help=' Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...', arg_group='ComputeConfiguration')
+ c.argument('azure_ml_compute_configuration', action=AddAzureMlComputeConfiguration, nargs='+', help=' Expect '
+ 'value: KEY1=VALUE1 KEY2=VALUE2 ...', arg_group='ComputeConfiguration')
+ c.argument('auth_mode', arg_type=get_enum_type(['AMLToken', 'Key', 'AADToken']), help='Inference endpoint '
+ 'authentication mode type')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ResourceId of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-endpoint update') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='')
+ c.argument('traffic_rules', action=AddMachinelearningservicesOnlineEndpointUpdateTrafficRules, nargs='+',
+ help='Traffic rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ResourceId of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-endpoint delete') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint get-token') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint list-key') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-endpoint regenerate-key') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('key_type', arg_type=get_enum_type(['Primary', 'Secondary']), help='Specification for which type of '
+ 'key to generate. Primary or Secondary.')
+ c.argument('key_value', type=str, help='The value the key is set to.')
+
+ with self.argument_context('machinelearningservices online-endpoint wait') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
new file mode 100644
index 00000000000..dee89f34ad1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
@@ -0,0 +1,1148 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
class AddSku(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``sku`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.sku = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'name': 'name', 'tier': 'tier'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, tier'.
                               format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddSharedPrivateLinkResources(argparse._AppendAction):
    """Append one parsed KEY=VALUE dict per use to ``shared_private_link_resources``."""

    def __call__(self, parser, namespace, values, option_string=None):
        item = self.get_action(values, option_string)
        super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, item, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'name': 'name',
            'private-link-resource-id': 'private_link_resource_id',
            'group-id': 'group_id',
            'request-message': 'request_message',
            'status': 'status',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter shared_private_link_resources. All '
                               'possible keys are: name, private-link-resource-id, group-id, request-message, status'.
                               format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddKeyVaultProperties(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``key_vault_properties`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.key_vault_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'key-vault-arm-id': 'key_vault_arm_id',
            'key-identifier': 'key_identifier',
            'identity-client-id': 'identity_client_id',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys '
                               'are: key-vault-arm-id, key-identifier, identity-client-id'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddValue(argparse._AppendAction):
    """Append one parsed KEY=VALUE dict per use to ``value``."""

    def __call__(self, parser, namespace, values, option_string=None):
        item = self.get_action(values, option_string)
        super(AddValue, self).__call__(parser, namespace, item, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'id': 'id',
            'type': 'type',
            'limit': 'limit',
            'unit': 'unit',
            'location': 'location',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter value. All possible keys are: id, type, '
                               'limit, unit, location'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddAdministratorAccount(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``administrator_account`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.administrator_account = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'username': 'username',
            'password': 'password',
            'public-key-data': 'public_key_data',
            'private-key-data': 'private_key_data',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter administrator_account. All possible keys '
                               'are: username, password, public-key-data, private-key-data'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddMachinelearningcomputeScaleSettings(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``scale_settings`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.scale_settings = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'max-node-count': 'max_node_count',
            'min-node-count': 'min_node_count',
            'node-idle-time-before-scale-down': 'node_idle_time_before_scale_down',
        }
        # min-node-count defaults to 0 when not supplied.
        result = {'min_node_count': 0}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
                               'max-node-count, min-node-count, node-idle-time-before-scale-down'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddPrivateLinkServiceConnectionState(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``private_link_service_connection_state`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.private_link_service_connection_state = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'status': 'status',
            'description': 'description',
            'actions-required': 'actions_required',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
                               'All possible keys are: status, description, actions-required'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddLinkedservicesProperties(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``properties`` dict for linked services."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'linked-service-resource-id': 'linked_service_resource_id',
            'created-time': 'created_time',
            'modified-time': 'modified_time',
        }
        # link_type is fixed to "Synapse" for this resource.
        result = {'link_type': "Synapse"}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter properties. All possible keys are: '
                               'linked-service-resource-id, created-time, modified-time'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddCodecontainersProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddAssetPath(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``asset_path`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.asset_path = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'path': 'path', 'is-directory': 'is_directory'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter asset_path. All possible keys are: path, '
                               'is-directory'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddCodeversionsProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddComponentcontainersProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddCodeConfiguration(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``code_configuration`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.code_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'code-artifact-id': 'code_artifact_id', 'command': 'command'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter code_configuration. All possible keys '
                               'are: code-artifact-id, command'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddComponentversionsProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddDatacontainersProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddLinkedInfo(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``linked_info`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.linked_info = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'linked-id': 'linked_id',
            'linked-resource-name': 'linked_resource_name',
            'origin': 'origin',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter linked_info. All possible keys are: '
                               'linked-id, linked-resource-name, origin'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddDatastoresProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddGlusterFs(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``gluster_fs`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.gluster_fs = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'server-address': 'server_address', 'volume-name': 'volume_name'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter gluster_fs. All possible keys are: '
                               'server-address, volume-name'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddDataversionsProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddEnvironmentcontainersProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddDockerImage(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``docker_image`` dict
    (docker_specification_type is always 'Image')."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.docker_image = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'docker-image-uri': 'docker_image_uri', 'operating-system-type': 'operating_system_type'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter docker_image. All possible keys are: '
                               'docker-image-uri, operating-system-type'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        # Discriminator is fixed for this action.
        result['docker_specification_type'] = 'Image'
        return result
+
+
class AddDockerBuild(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``docker_build`` dict
    (docker_specification_type is always 'Build')."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.docker_build = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {
            'dockerfile': 'dockerfile',
            'context': 'context',
            'operating-system-type': 'operating_system_type',
        }
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter docker_build. All possible keys are: '
                               'dockerfile, context, operating-system-type'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        # Discriminator is fixed for this action.
        result['docker_specification_type'] = 'Build'
        return result
+
+
class AddEnvironmentspecificationversionsProperties(argparse.Action):
    """Collect arbitrary KEY=VALUE tokens into the ``properties`` dict (keys pass through unchanged)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # First occurrence of a repeated key wins.
        return {key: occurrences[0] for key, occurrences in parsed.items()}
+
+
class AddLivenessRoute(argparse.Action):
    """Parse space-separated KEY=VALUE tokens into the ``liveness_route`` dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.liveness_route = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            parsed = {}
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, []).append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key (case-insensitive) -> SDK property name.
        key_map = {'path': 'path', 'port': 'port'}
        result = {}
        for key, occurrences in parsed.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError('Unsupported Key {} is provided for parameter liveness_route. All possible keys are: '
                               'path, port'.format(key))
            # First occurrence of a repeated key wins.
            result[target] = occurrences[0]
        return result
+
+
class AddLabelingjobsProperties(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into a plain dict on ``namespace.properties``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddDatasetConfiguration(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into the dataset-configuration dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.dataset_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Map the case-insensitive CLI keys onto SDK field names.
        key_map = {
            'asset-name': 'asset_name',
            'incremental-dataset-refresh-enabled': 'incremental_dataset_refresh_enabled',
            'dataset-version': 'dataset_version',
        }
        config = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter dataset_configuration. All possible keys '
                               'are: asset-name, incremental-dataset-refresh-enabled, dataset-version'.format(key))
            config[dest] = value
        return config
+
+
class AddLabelingJobImageProperties(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into the image labeling-job media properties."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.labeling_job_image_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        props = {}
        for key, value in parsed.items():
            if key.lower() == 'annotation-type':
                props['annotation_type'] = value
            else:
                raise CLIError('Unsupported Key {} is provided for parameter labeling_job_image_properties. All '
                               'possible keys are: annotation-type'.format(key))
        # Polymorphic discriminator for the service payload.
        props['media_type'] = 'Image'
        return props
+
+
class AddLabelingJobTextProperties(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into the text labeling-job media properties."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.labeling_job_text_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        props = {}
        for key, value in parsed.items():
            if key.lower() == 'annotation-type':
                props['annotation_type'] = value
            else:
                raise CLIError('Unsupported Key {} is provided for parameter labeling_job_text_properties. All '
                               'possible keys are: annotation-type'.format(key))
        # Polymorphic discriminator for the service payload.
        props['media_type'] = 'Text'
        return props
+
+
class AddInferencingComputeBinding(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into the inferencing compute-binding dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.inferencing_compute_binding = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Map the case-insensitive CLI keys onto SDK field names.
        key_map = {
            'compute-id': 'compute_id',
            'node-count': 'node_count',
            'is-local': 'is_local',
        }
        binding = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter inferencing_compute_binding. All possible '
                               'keys are: compute-id, node-count, is-local'.format(key))
            binding[dest] = value
        return binding
+
+
class AddCocoExportSummary(argparse.Action):
    """Build the Coco-format export-summary object for --coco-export-summary.

    The object carries only the fixed ``format`` discriminator; supplied keys are
    validated for KEY=VALUE shape and otherwise ignored (matching the generated
    contract, which never rejected unknown keys here).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.coco_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            for token in values:
                _key, _value = token.split('=', 1)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Bug fix: the discriminator was previously assigned inside the parsing
        # loop, so an empty argument list produced {} without 'format'.
        return {'format': 'Coco'}
+
+
class AddCsvExportSummary(argparse.Action):
    """Build the CSV-format export-summary object for --csv-export-summary.

    The object carries only the fixed ``format`` discriminator; supplied keys are
    validated for KEY=VALUE shape and otherwise ignored (matching the generated
    contract, which never rejected unknown keys here).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.csv_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            for token in values:
                _key, _value = token.split('=', 1)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Bug fix: the discriminator was previously assigned inside the parsing
        # loop, so an empty argument list produced {} without 'format'.
        return {'format': 'CSV'}
+
+
class AddDatasetExportSummary(argparse.Action):
    """Build the Dataset-format export-summary object for --dataset-export-summary.

    The object carries only the fixed ``format`` discriminator; supplied keys are
    validated for KEY=VALUE shape and otherwise ignored (matching the generated
    contract, which never rejected unknown keys here).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.dataset_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            for token in values:
                _key, _value = token.split('=', 1)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Bug fix: the discriminator was previously assigned inside the parsing
        # loop, so an empty argument list produced {} without 'format'.
        return {'format': 'Dataset'}
+
+
class AddModelcontainersProperties(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into a plain dict on ``namespace.properties``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddModelversionsProperties(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into a plain dict on ``namespace.properties``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddOnlinedeploymentsScaleSettings(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into the online-deployment scale-settings dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.scale_settings = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Map the case-insensitive CLI keys onto SDK field names.
        key_map = {
            'minimum': 'minimum',
            'maximum': 'maximum',
            'instance-count': 'instance_count',
            'scale-type': 'scale_type',
        }
        settings = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
                               'minimum, maximum, instance-count, scale-type'.format(key))
            settings[dest] = value
        return settings
+
+
class AddOnlinedeploymentsProperties(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into a plain dict on ``namespace.properties``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddIdAssetReference(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into an Id-type asset-reference dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.id_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        reference = {}
        for key, value in parsed.items():
            if key.lower() == 'asset-id':
                reference['asset_id'] = value
            else:
                raise CLIError('Unsupported Key {} is provided for parameter id_asset_reference. All possible keys '
                               'are: asset-id'.format(key))
        # Polymorphic discriminator for the service payload.
        reference['reference_type'] = 'Id'
        return reference
+
+
class AddDataPathAssetReference(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into a DataPath-type asset-reference dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.data_path_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {'path': 'path', 'datastore-id': 'datastore_id'}
        reference = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter data_path_asset_reference. All possible '
                               'keys are: path, datastore-id'.format(key))
            reference[dest] = value
        # Polymorphic discriminator for the service payload.
        reference['reference_type'] = 'DataPath'
        return reference
+
+
class AddOutputPathAssetReference(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into an OutputPath-type asset-reference dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.output_path_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {'path': 'path', 'job-id': 'job_id'}
        reference = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter output_path_asset_reference. All possible '
                               'keys are: path, job-id'.format(key))
            reference[dest] = value
        # Polymorphic discriminator for the service payload.
        reference['reference_type'] = 'OutputPath'
        return reference
+
+
class AddEnvironmentVariables(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into ``namespace.environment_variables``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.environment_variables = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddProperties(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into a plain dict on ``namespace.properties``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddMachinelearningservicesOnlineEndpointCreateTrafficRules(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into ``namespace.traffic_rules`` for create."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic_rules = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
+
+
class AddAksComputeConfiguration(argparse.Action):
    """Parse repeatable KEY=VALUE tokens into an AKS compute-configuration dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.aks_compute_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # Split every token up front; a repeated key keeps its first value.
        parsed = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                parsed.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {'namespace': 'namespace', 'compute-name': 'compute_name'}
        config = {}
        for key, value in parsed.items():
            dest = key_map.get(key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter aks_compute_configuration. All possible '
                               'keys are: namespace, compute-name'.format(key))
            config[dest] = value
        # Polymorphic discriminator for the service payload.
        config['compute_type'] = 'AKS'
        return config
+
+
class AddManagedComputeConfiguration(argparse.Action):
    """Build the compute-configuration object for a Managed compute target.

    The object carries only the fixed ``compute_type`` discriminator; supplied
    keys are validated for KEY=VALUE shape and otherwise ignored (matching the
    generated contract, which never rejected unknown keys here).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.managed_compute_configuration = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            for token in values:
                _key, _value = token.split('=', 1)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Bug fix: the discriminator was previously assigned inside the parsing
        # loop, so an empty argument list produced {} without 'compute_type'.
        return {'compute_type': 'Managed'}
+
+
class AddAzureMlComputeConfiguration(argparse.Action):
    """Build the compute-configuration object for an AzureMLCompute target.

    The object carries only the fixed ``compute_type`` discriminator; supplied
    keys are validated for KEY=VALUE shape and otherwise ignored (matching the
    generated contract, which never rejected unknown keys here).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.azure_ml_compute_configuration = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            for token in values:
                _key, _value = token.split('=', 1)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Bug fix: the discriminator was previously assigned inside the parsing
        # loop, so an empty argument list produced {} without 'compute_type'.
        return {'compute_type': 'AzureMLCompute'}
+
+
class AddMachinelearningservicesOnlineEndpointUpdateTrafficRules(argparse.Action):
    """Collect repeatable KEY=VALUE tokens into ``namespace.traffic_rules`` for update."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic_rules = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        result = {}
        try:
            for token in values:
                key, value = token.split('=', 1)
                # First occurrence wins for a repeated key.
                result.setdefault(key, value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return result
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
new file mode 100644
index 00000000000..3e603e485fa
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
@@ -0,0 +1,415 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
+def load_command_table(self, _):
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace
+ machinelearningservices_workspace = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspaces_ope'
+ 'rations#WorkspacesOperations.{}',
+ client_factory=cf_workspace)
+ with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
+ client_factory=cf_workspace) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_show')
+ g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_workspace_update')
+ g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_workspace_list_key')
+ g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key')
+ g.custom_wait_command('wait', 'machinelearningservices_workspace_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_feature
+ machinelearningservices_workspace_feature = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat'
+ 'ures_operations#WorkspaceFeaturesOperations.{}',
+ client_factory=cf_workspace_feature)
+ with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature,
+ client_factory=cf_workspace_feature) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_feature_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_usage
+ machinelearningservices_usage = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usages_operati'
+ 'ons#UsagesOperations.{}',
+ client_factory=cf_usage)
+ with self.command_group('machinelearningservices usage', machinelearningservices_usage,
+ client_factory=cf_usage) as g:
+ g.custom_command('list', 'machinelearningservices_usage_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size
+ machinelearningservices_virtual_machine_size = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin'
+ 'e_sizes_operations#VirtualMachineSizesOperations.{}',
+ client_factory=cf_virtual_machine_size)
+ with self.command_group('machinelearningservices virtual-machine-size',
+ machinelearningservices_virtual_machine_size,
+ client_factory=cf_virtual_machine_size) as g:
+ g.custom_command('list', 'machinelearningservices_virtual_machine_size_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_quota
+ machinelearningservices_quota = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quotas_operati'
+ 'ons#QuotasOperations.{}',
+ client_factory=cf_quota)
+ with self.command_group('machinelearningservices quota', machinelearningservices_quota,
+ client_factory=cf_quota) as g:
+ g.custom_command('list', 'machinelearningservices_quota_list')
+ g.custom_command('update', 'machinelearningservices_quota_update')
+
+ from azext_machinelearningservices.generated._client_factory import cf_machine_learning_compute
+ machinelearningservices_machine_learning_compute = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni'
+ 'ng_compute_operations#MachineLearningComputeOperations.{}',
+ client_factory=cf_machine_learning_compute)
+ with self.command_group('machinelearningservices machine-learning-compute',
+ machinelearningservices_machine_learning_compute,
+ client_factory=cf_machine_learning_compute) as g:
+ g.custom_command('list', 'machinelearningservices_machine_learning_compute_list')
+ g.custom_show_command('show', 'machinelearningservices_machine_learning_compute_show')
+ g.custom_command('aks create', 'machinelearningservices_machine_learning_compute_aks_create',
+ supports_no_wait=True)
+ g.custom_command('aml-compute create', 'machinelearningservices_machine_learning_compute_aml_compute_create',
+ supports_no_wait=True)
+ g.custom_command('compute-instance create', 'machinelearningservices_machine_learning_compute_compute_instance_'
+ 'create', supports_no_wait=True)
+ g.custom_command('data-factory create', 'machinelearningservices_machine_learning_compute_data_factory_create',
+ supports_no_wait=True)
+ g.custom_command('data-lake-analytics create', 'machinelearningservices_machine_learning_compute_data_lake_anal'
+ 'ytics_create', supports_no_wait=True)
+ g.custom_command('databricks create', 'machinelearningservices_machine_learning_compute_databricks_create',
+ supports_no_wait=True)
+ g.custom_command('hd-insight create', 'machinelearningservices_machine_learning_compute_hd_insight_create',
+ supports_no_wait=True)
+ g.custom_command('virtual-machine create', 'machinelearningservices_machine_learning_compute_virtual_machine_cr'
+ 'eate', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_machine_learning_compute_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_machine_learning_compute_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_machine_learning_compute_list_key')
+ g.custom_command('list-node', 'machinelearningservices_machine_learning_compute_list_node')
+ g.custom_command('restart', 'machinelearningservices_machine_learning_compute_restart')
+ g.custom_command('start', 'machinelearningservices_machine_learning_compute_start')
+ g.custom_command('stop', 'machinelearningservices_machine_learning_compute_stop')
+ g.custom_wait_command('wait', 'machinelearningservices_machine_learning_compute_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices
+ machinelearningservices_ = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_operatio'
+ 'ns#AzureMachineLearningWorkspacesOperationsMixin.{}',
+ client_factory=cf_machinelearningservices)
+ with self.command_group('machinelearningservices', machinelearningservices_,
+ client_factory=cf_machinelearningservices, is_experimental=True) as g:
+ g.custom_command('list-sku', 'machinelearningservices_list_sku')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection
+ machinelearningservices_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi'
+ 'nt_connections_operations#PrivateEndpointConnectionsOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('machinelearningservices private-endpoint-connection',
+ machinelearningservices_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show')
+ g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', confirmation=True)
+ g.custom_command('put', 'machinelearningservices_private_endpoint_connection_put')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_link_resource
+ machinelearningservices_private_link_resource = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r'
+ 'esources_operations#PrivateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('machinelearningservices private-link-resource',
+ machinelearningservices_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_command('list', 'machinelearningservices_private_link_resource_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_linked_service
+ machinelearningservices_linked_service = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._linked_service'
+ 's_operations#LinkedServicesOperations.{}',
+ client_factory=cf_linked_service)
+ with self.command_group('machinelearningservices linked-service', machinelearningservices_linked_service,
+ client_factory=cf_linked_service) as g:
+ g.custom_command('list', 'machinelearningservices_linked_service_list')
+ g.custom_show_command('show', 'machinelearningservices_linked_service_show')
+ g.custom_command('create', 'machinelearningservices_linked_service_create')
+ g.custom_command('delete', 'machinelearningservices_linked_service_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_machine_learning_service
+ machinelearningservices_machine_learning_service = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni'
+ 'ng_service_operations#MachineLearningServiceOperations.{}',
+ client_factory=cf_machine_learning_service)
+ with self.command_group('machinelearningservices machine-learning-service',
+ machinelearningservices_machine_learning_service,
+ client_factory=cf_machine_learning_service) as g:
+ g.custom_command('list', 'machinelearningservices_machine_learning_service_list')
+ g.custom_show_command('show', 'machinelearningservices_machine_learning_service_show')
+ g.custom_command('create', 'machinelearningservices_machine_learning_service_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_machine_learning_service_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_machine_learning_service_delete', confirmation=True)
+ g.custom_wait_command('wait', 'machinelearningservices_machine_learning_service_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_notebook
+ machinelearningservices_notebook = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._notebooks_oper'
+ 'ations#NotebooksOperations.{}',
+ client_factory=cf_notebook)
+ with self.command_group('machinelearningservices notebook', machinelearningservices_notebook,
+ client_factory=cf_notebook) as g:
+ g.custom_command('list-key', 'machinelearningservices_notebook_list_key')
+ g.custom_command('prepare', 'machinelearningservices_notebook_prepare')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_connection
+ machinelearningservices_workspace_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn'
+ 'ections_operations#WorkspaceConnectionsOperations.{}',
+ client_factory=cf_workspace_connection)
+ with self.command_group('machinelearningservices workspace-connection',
+ machinelearningservices_workspace_connection,
+ client_factory=cf_workspace_connection) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_connection_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_connection_show')
+ g.custom_command('create', 'machinelearningservices_workspace_connection_create')
+ g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_code_container
+ machinelearningservices_code_container = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._code_container'
+ 's_operations#CodeContainersOperations.{}',
+ client_factory=cf_code_container)
+ with self.command_group('machinelearningservices code-container', machinelearningservices_code_container,
+ client_factory=cf_code_container) as g:
+ g.custom_command('list', 'machinelearningservices_code_container_list')
+ g.custom_show_command('show', 'machinelearningservices_code_container_show')
+ g.custom_command('create', 'machinelearningservices_code_container_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_code_conta'
+ 'iner_update')
+ g.custom_command('delete', 'machinelearningservices_code_container_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_code_version
+ machinelearningservices_code_version = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._code_versions_'
+ 'operations#CodeVersionsOperations.{}',
+ client_factory=cf_code_version)
+ with self.command_group('machinelearningservices code-version', machinelearningservices_code_version,
+ client_factory=cf_code_version) as g:
+ g.custom_command('list', 'machinelearningservices_code_version_list')
+ g.custom_show_command('show', 'machinelearningservices_code_version_show')
+ g.custom_command('create', 'machinelearningservices_code_version_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_code_versi'
+ 'on_update')
+ g.custom_command('delete', 'machinelearningservices_code_version_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_component_container
+ machinelearningservices_component_container = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._component_cont'
+ 'ainers_operations#ComponentContainersOperations.{}',
+ client_factory=cf_component_container)
+ with self.command_group('machinelearningservices component-container', machinelearningservices_component_container,
+ client_factory=cf_component_container) as g:
+ g.custom_command('list', 'machinelearningservices_component_container_list')
+ g.custom_show_command('show', 'machinelearningservices_component_container_show')
+ g.custom_command('create', 'machinelearningservices_component_container_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_component_'
+ 'container_update')
+ g.custom_command('delete', 'machinelearningservices_component_container_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_component_version
+ machinelearningservices_component_version = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._component_vers'
+ 'ions_operations#ComponentVersionsOperations.{}',
+ client_factory=cf_component_version)
+ with self.command_group('machinelearningservices component-version', machinelearningservices_component_version,
+ client_factory=cf_component_version) as g:
+ g.custom_command('list', 'machinelearningservices_component_version_list')
+ g.custom_show_command('show', 'machinelearningservices_component_version_show')
+ g.custom_command('create', 'machinelearningservices_component_version_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_component_'
+ 'version_update')
+ g.custom_command('delete', 'machinelearningservices_component_version_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_data_container
+ machinelearningservices_data_container = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._data_container'
+ 's_operations#DataContainersOperations.{}',
+ client_factory=cf_data_container)
+ with self.command_group('machinelearningservices data-container', machinelearningservices_data_container,
+ client_factory=cf_data_container) as g:
+ g.custom_command('list', 'machinelearningservices_data_container_list')
+ g.custom_show_command('show', 'machinelearningservices_data_container_show')
+ g.custom_command('create', 'machinelearningservices_data_container_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_data_conta'
+ 'iner_update')
+ g.custom_command('delete', 'machinelearningservices_data_container_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_datastore
+ machinelearningservices_datastore = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._datastores_ope'
+ 'rations#DatastoresOperations.{}',
+ client_factory=cf_datastore)
+ with self.command_group('machinelearningservices datastore', machinelearningservices_datastore,
+ client_factory=cf_datastore) as g:
+ g.custom_command('list', 'machinelearningservices_datastore_list')
+ g.custom_show_command('show', 'machinelearningservices_datastore_show')
+ g.custom_command('create', 'machinelearningservices_datastore_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_datastore_'
+ 'update')
+ g.custom_command('delete', 'machinelearningservices_datastore_delete', confirmation=True)
+ g.custom_command('list-secret', 'machinelearningservices_datastore_list_secret')
+
+ from azext_machinelearningservices.generated._client_factory import cf_data_version
+ machinelearningservices_data_version = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._data_versions_'
+ 'operations#DataVersionsOperations.{}',
+ client_factory=cf_data_version)
+ with self.command_group('machinelearningservices data-version', machinelearningservices_data_version,
+ client_factory=cf_data_version) as g:
+ g.custom_command('list', 'machinelearningservices_data_version_list')
+ g.custom_show_command('show', 'machinelearningservices_data_version_show')
+ g.custom_command('create', 'machinelearningservices_data_version_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_data_versi'
+ 'on_update')
+ g.custom_command('delete', 'machinelearningservices_data_version_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_environment_container
+ machinelearningservices_environment_container = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._environment_co'
+ 'ntainers_operations#EnvironmentContainersOperations.{}',
+ client_factory=cf_environment_container)
+ with self.command_group('machinelearningservices environment-container',
+ machinelearningservices_environment_container,
+ client_factory=cf_environment_container) as g:
+ g.custom_command('list', 'machinelearningservices_environment_container_list')
+ g.custom_show_command('show', 'machinelearningservices_environment_container_show')
+ g.custom_command('create', 'machinelearningservices_environment_container_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_environmen'
+ 't_container_update')
+ g.custom_command('delete', 'machinelearningservices_environment_container_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_environment_specification_version
+ machinelearningservices_environment_specification_version = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._environment_sp'
+ 'ecification_versions_operations#EnvironmentSpecificationVersionsOperations.{}',
+ client_factory=cf_environment_specification_version)
+ with self.command_group('machinelearningservices environment-specification-version',
+ machinelearningservices_environment_specification_version,
+ client_factory=cf_environment_specification_version) as g:
+ g.custom_command('list', 'machinelearningservices_environment_specification_version_list')
+ g.custom_show_command('show', 'machinelearningservices_environment_specification_version_show')
+ g.custom_command('create', 'machinelearningservices_environment_specification_version_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_environmen'
+ 't_specification_version_update')
+ g.custom_command('delete', 'machinelearningservices_environment_specification_version_delete',
+ confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_job
+ machinelearningservices_job = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._jobs_operation'
+ 's#JobsOperations.{}',
+ client_factory=cf_job)
+ with self.command_group('machinelearningservices job', machinelearningservices_job, client_factory=cf_job) as g:
+ g.custom_command('list', 'machinelearningservices_job_list')
+ g.custom_show_command('show', 'machinelearningservices_job_show')
+ g.custom_command('create', 'machinelearningservices_job_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_job_update'
+ '')
+ g.custom_command('delete', 'machinelearningservices_job_delete', supports_no_wait=True, confirmation=True)
+ g.custom_command('cancel', 'machinelearningservices_job_cancel')
+ g.custom_wait_command('wait', 'machinelearningservices_job_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_labeling_job
+ machinelearningservices_labeling_job = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._labeling_jobs_'
+ 'operations#LabelingJobsOperations.{}',
+ client_factory=cf_labeling_job)
+ with self.command_group('machinelearningservices labeling-job', machinelearningservices_labeling_job,
+ client_factory=cf_labeling_job) as g:
+ g.custom_command('list', 'machinelearningservices_labeling_job_list')
+ g.custom_show_command('show', 'machinelearningservices_labeling_job_show')
+ g.custom_command('create', 'machinelearningservices_labeling_job_create', supports_no_wait=True)
+ g.generic_update_command('update', setter_arg_name='body', setter_name='begin_create_or_update',
+ custom_func_name='machinelearningservices_labeling_job_update',
+ supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_labeling_job_delete', confirmation=True)
+ g.custom_command('export-label', 'machinelearningservices_labeling_job_export_label', supports_no_wait=True)
+ g.custom_command('pause', 'machinelearningservices_labeling_job_pause')
+ g.custom_command('resume', 'machinelearningservices_labeling_job_resume', supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_labeling_job_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_model_container
+ machinelearningservices_model_container = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_containe'
+ 'rs_operations#ModelContainersOperations.{}',
+ client_factory=cf_model_container)
+ with self.command_group('machinelearningservices model-container', machinelearningservices_model_container,
+ client_factory=cf_model_container) as g:
+ g.custom_command('list', 'machinelearningservices_model_container_list')
+ g.custom_show_command('show', 'machinelearningservices_model_container_show')
+ g.custom_command('create', 'machinelearningservices_model_container_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_model_cont'
+ 'ainer_update')
+ g.custom_command('delete', 'machinelearningservices_model_container_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_model_version
+ machinelearningservices_model_version = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_versions'
+ '_operations#ModelVersionsOperations.{}',
+ client_factory=cf_model_version)
+ with self.command_group('machinelearningservices model-version', machinelearningservices_model_version,
+ client_factory=cf_model_version) as g:
+ g.custom_command('list', 'machinelearningservices_model_version_list')
+ g.custom_show_command('show', 'machinelearningservices_model_version_show')
+ g.custom_command('create', 'machinelearningservices_model_version_create')
+ g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_model_vers'
+ 'ion_update')
+ g.custom_command('delete', 'machinelearningservices_model_version_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_online_deployment
+ machinelearningservices_online_deployment = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._online_deploym'
+ 'ents_operations#OnlineDeploymentsOperations.{}',
+ client_factory=cf_online_deployment)
+ with self.command_group('machinelearningservices online-deployment', machinelearningservices_online_deployment,
+ client_factory=cf_online_deployment) as g:
+ g.custom_command('list', 'machinelearningservices_online_deployment_list')
+ g.custom_show_command('show', 'machinelearningservices_online_deployment_show')
+ g.custom_command('create', 'machinelearningservices_online_deployment_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_online_deployment_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_online_deployment_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('get-log', 'machinelearningservices_online_deployment_get_log')
+ g.custom_wait_command('wait', 'machinelearningservices_online_deployment_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_online_endpoint
+ machinelearningservices_online_endpoint = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._online_endpoin'
+ 'ts_operations#OnlineEndpointsOperations.{}',
+ client_factory=cf_online_endpoint)
+ with self.command_group('machinelearningservices online-endpoint', machinelearningservices_online_endpoint,
+ client_factory=cf_online_endpoint) as g:
+ g.custom_command('list', 'machinelearningservices_online_endpoint_list')
+ g.custom_show_command('show', 'machinelearningservices_online_endpoint_show')
+ g.custom_command('create', 'machinelearningservices_online_endpoint_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_online_endpoint_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_online_endpoint_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('get-token', 'machinelearningservices_online_endpoint_get_token')
+ g.custom_command('list-key', 'machinelearningservices_online_endpoint_list_key')
+ g.custom_command('regenerate-key', 'machinelearningservices_online_endpoint_regenerate_key',
+ supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_online_endpoint_show')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
new file mode 100644
index 00000000000..8cdb8dcff24
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
@@ -0,0 +1,2304 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+# pylint: disable=too-many-lines
+# pylint: disable=unused-argument
+
+from knack.util import CLIError
+from azure.cli.core.util import sdk_no_wait
+
+
def machinelearningservices_workspace_list(client, resource_group_name=None, skiptoken=None):
    """List workspaces in a resource group, or across the whole subscription
    when no resource group is given."""
    if not resource_group_name:
        return client.list_by_subscription(skiptoken=skiptoken)
    return client.list_by_resource_group(resource_group_name=resource_group_name, skiptoken=skiptoken)
+
+
def machinelearningservices_workspace_show(client, resource_group_name, workspace_name):
    """Get the details of a single workspace."""
    return client.get(resource_group_name=resource_group_name, workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_create(client,
                                             resource_group_name,
                                             workspace_name,
                                             location=None,
                                             tags=None,
                                             sku=None,
                                             type_=None,
                                             user_assigned_identities=None,
                                             description=None,
                                             friendly_name=None,
                                             key_vault=None,
                                             application_insights=None,
                                             container_registry=None,
                                             storage_account=None,
                                             discovery_url=None,
                                             hbi_workspace=None,
                                             image_build_compute=None,
                                             allow_public_access_when_behind_vnet=None,
                                             shared_private_link_resources=None,
                                             status=None,
                                             key_vault_properties=None,
                                             no_wait=False):
    """Create (or update) a workspace.

    Bug fix: ``description`` was previously accepted but never copied into the
    request payload, so ``--description`` was silently ignored on create — the
    update path already forwards it. Also collapses the redundant
    double-defaulting of the two boolean flags.
    """
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
        'description': description,  # BUG FIX: was dropped before this change
        'friendly_name': friendly_name,
        'key_vault': key_vault,
        'application_insights': application_insights,
        'container_registry': container_registry,
        'storage_account': storage_account,
        'discovery_url': discovery_url,
        # Booleans default to False when the caller leaves them unset.
        'hbi_workspace': False if hbi_workspace is None else hbi_workspace,
        'image_build_compute': image_build_compute,
        'allow_public_access_when_behind_vnet': (
            False if allow_public_access_when_behind_vnet is None
            else allow_public_access_when_behind_vnet),
        'shared_private_link_resources': shared_private_link_resources,
        'encryption': {
            'status': status,
            'key_vault_properties': key_vault_properties,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       parameters=parameters)
+
+
def machinelearningservices_workspace_update(client, resource_group_name, workspace_name,
                                             tags=None, sku=None, description=None, friendly_name=None):
    """Patch the mutable fields of a workspace (tags, sku, description,
    friendly name); unset fields are sent as None."""
    parameters = {
        'tags': tags,
        'sku': sku,
        'description': description,
        'friendly_name': friendly_name,
    }
    return client.update(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         parameters=parameters)
+
+
def machinelearningservices_workspace_delete(client, resource_group_name, workspace_name, no_wait=False):
    """Delete a workspace; honours --no-wait via the LRO helper."""
    return sdk_no_wait(no_wait, client.begin_delete,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_list_key(client, resource_group_name, workspace_name):
    """List the keys associated with a workspace."""
    return client.list_keys(resource_group_name=resource_group_name, workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_resync_key(client, resource_group_name, workspace_name):
    """Resynchronize the keys associated with a workspace."""
    return client.resync_keys(resource_group_name=resource_group_name, workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_feature_list(client, resource_group_name, workspace_name):
    """List the features available for a workspace."""
    return client.list(resource_group_name=resource_group_name, workspace_name=workspace_name)
+
+
def machinelearningservices_usage_list(client, location):
    """List usage information for the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_virtual_machine_size_list(client, location):
    """List virtual-machine sizes available in the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_quota_list(client, location):
    """List quota figures for the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_quota_update(client, location, value=None):
    """Update quota values for the given Azure location."""
    return client.update(location=location, parameters={'value': value})
+
+
def machinelearningservices_machine_learning_compute_list(client, resource_group_name, workspace_name,
                                                          skiptoken=None):
    """List the compute targets of a workspace (paged via skiptoken)."""
    return client.list_by_workspace(resource_group_name=resource_group_name,
                                    workspace_name=workspace_name,
                                    skiptoken=skiptoken)
+
+
def machinelearningservices_machine_learning_compute_show(client, resource_group_name, workspace_name,
                                                          compute_name):
    """Get the details of a single compute target."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      compute_name=compute_name)
+
+
def machinelearningservices_machine_learning_compute_aks_create(client,
                                                                resource_group_name,
                                                                workspace_name,
                                                                compute_name,
                                                                location=None,
                                                                tags=None,
                                                                sku=None,
                                                                type_=None,
                                                                user_assigned_identities=None,
                                                                ak_s_compute_location=None,
                                                                ak_s_description=None,
                                                                ak_s_resource_id=None,
                                                                ak_s_properties=None,
                                                                no_wait=False):
    """Create or attach an AKS compute target in a workspace."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'Aks',  # discriminator selecting the AKS flavour
            'compute_location': ak_s_compute_location,
            'description': ak_s_description,
            'resource_id': ak_s_resource_id,
            'properties': ak_s_properties,
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_aml_compute_create(client,
                                                                        resource_group_name,
                                                                        workspace_name,
                                                                        compute_name,
                                                                        location=None,
                                                                        tags=None,
                                                                        sku=None,
                                                                        type_=None,
                                                                        user_assigned_identities=None,
                                                                        compute_location=None,
                                                                        description=None,
                                                                        resource_id=None,
                                                                        aml_compute_properties=None,
                                                                        no_wait=False):
    """Create or attach an AmlCompute (managed cluster) compute target."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'AmlCompute',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': aml_compute_properties,
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_compute_instance_create(client,
                                                                             resource_group_name,
                                                                             workspace_name,
                                                                             compute_name,
                                                                             location=None,
                                                                             tags=None,
                                                                             sku=None,
                                                                             type_=None,
                                                                             user_assigned_identities=None,
                                                                             compute_location=None,
                                                                             description=None,
                                                                             resource_id=None,
                                                                             compute_instance_properties=None,
                                                                             no_wait=False):
    """Create a ComputeInstance compute target in a workspace."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'ComputeInstance',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': compute_instance_properties,
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_data_factory_create(client,
                                                                         resource_group_name,
                                                                         workspace_name,
                                                                         compute_name,
                                                                         location=None,
                                                                         tags=None,
                                                                         sku=None,
                                                                         type_=None,
                                                                         user_assigned_identities=None,
                                                                         compute_location=None,
                                                                         description=None,
                                                                         resource_id=None,
                                                                         no_wait=False):
    """Attach a DataFactory compute target (no flavour-specific properties)."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'DataFactory',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_data_lake_analytics_create(client,
                                                                                resource_group_name,
                                                                                workspace_name,
                                                                                compute_name,
                                                                                location=None,
                                                                                tags=None,
                                                                                sku=None,
                                                                                type_=None,
                                                                                user_assigned_identities=None,
                                                                                compute_location=None,
                                                                                description=None,
                                                                                resource_id=None,
                                                                                data_lake_store_account_name=None,
                                                                                no_wait=False):
    """Attach a DataLakeAnalytics compute target to a workspace."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'DataLakeAnalytics',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': {'data_lake_store_account_name': data_lake_store_account_name},
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_databricks_create(client,
                                                                       resource_group_name,
                                                                       workspace_name,
                                                                       compute_name,
                                                                       location=None,
                                                                       tags=None,
                                                                       sku=None,
                                                                       type_=None,
                                                                       user_assigned_identities=None,
                                                                       compute_location=None,
                                                                       description=None,
                                                                       resource_id=None,
                                                                       databricks_access_token=None,
                                                                       no_wait=False):
    """Attach a Databricks compute target to a workspace."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'Databricks',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': {'databricks_access_token': databricks_access_token},
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_hd_insight_create(client,
                                                                       resource_group_name,
                                                                       workspace_name,
                                                                       compute_name,
                                                                       location=None,
                                                                       tags=None,
                                                                       sku=None,
                                                                       type_=None,
                                                                       user_assigned_identities=None,
                                                                       compute_location=None,
                                                                       description=None,
                                                                       resource_id=None,
                                                                       ssh_port=None,
                                                                       address=None,
                                                                       administrator_account=None,
                                                                       no_wait=False):
    """Attach an HDInsight compute target to a workspace."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'HdInsight',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': {
                'ssh_port': ssh_port,
                'address': address,
                'administrator_account': administrator_account,
            },
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_virtual_machine_create(client,
                                                                            resource_group_name,
                                                                            workspace_name,
                                                                            compute_name,
                                                                            location=None,
                                                                            tags=None,
                                                                            sku=None,
                                                                            type_=None,
                                                                            user_assigned_identities=None,
                                                                            compute_location=None,
                                                                            description=None,
                                                                            resource_id=None,
                                                                            virtual_machine_size=None,
                                                                            ssh_port=None,
                                                                            address=None,
                                                                            administrator_account=None,
                                                                            no_wait=False):
    """Attach an existing VirtualMachine as a compute target."""
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'properties': {
            'compute_type': 'VirtualMachine',  # discriminator for this flavour
            'compute_location': compute_location,
            'description': description,
            'resource_id': resource_id,
            'properties': {
                'virtual_machine_size': virtual_machine_size,
                'ssh_port': ssh_port,
                'address': address,
                'administrator_account': administrator_account,
            },
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=parameters)
+
+
def machinelearningservices_machine_learning_compute_update(client, resource_group_name, workspace_name,
                                                            compute_name, scale_settings=None, no_wait=False):
    """Update the scale settings of an existing compute target."""
    return sdk_no_wait(no_wait, client.begin_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters={'scale_settings': scale_settings})
+
+
def machinelearningservices_machine_learning_compute_delete(client, resource_group_name, workspace_name,
                                                            compute_name, underlying_resource_action,
                                                            no_wait=False):
    """Delete or detach a compute target, depending on
    underlying_resource_action."""
    return sdk_no_wait(no_wait, client.begin_delete,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       underlying_resource_action=underlying_resource_action)
+
+
def machinelearningservices_machine_learning_compute_list_key(client, resource_group_name, workspace_name,
                                                              compute_name):
    """List the secret keys of a compute target."""
    return client.list_keys(resource_group_name=resource_group_name,
                            workspace_name=workspace_name,
                            compute_name=compute_name)
+
+
def machinelearningservices_machine_learning_compute_list_node(client, resource_group_name, workspace_name,
                                                               compute_name):
    """List the nodes of a compute target."""
    return client.list_nodes(resource_group_name=resource_group_name,
                             workspace_name=workspace_name,
                             compute_name=compute_name)
+
+
def machinelearningservices_machine_learning_compute_restart(client, resource_group_name, workspace_name,
                                                             compute_name):
    """Restart a compute target."""
    return client.restart(resource_group_name=resource_group_name,
                          workspace_name=workspace_name,
                          compute_name=compute_name)
+
+
def machinelearningservices_machine_learning_compute_start(client, resource_group_name, workspace_name,
                                                           compute_name):
    """Start a compute target."""
    return client.start(resource_group_name=resource_group_name,
                        workspace_name=workspace_name,
                        compute_name=compute_name)
+
+
def machinelearningservices_machine_learning_compute_stop(client, resource_group_name, workspace_name,
                                                          compute_name):
    """Stop a compute target."""
    return client.stop(resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name)
+
+
def machinelearningservices_list_sku(client):
    """List the SKUs supported by the resource provider."""
    return client.list_skus()
+
+
def machinelearningservices_private_endpoint_connection_show(client, resource_group_name, workspace_name,
                                                             private_endpoint_connection_name):
    """Get a private endpoint connection of a workspace."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      private_endpoint_connection_name=private_endpoint_connection_name)
+
+
def machinelearningservices_private_endpoint_connection_delete(client, resource_group_name, workspace_name,
                                                               private_endpoint_connection_name):
    """Delete a private endpoint connection of a workspace."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         private_endpoint_connection_name=private_endpoint_connection_name)
+
+
def machinelearningservices_private_endpoint_connection_put(client, resource_group_name, workspace_name,
                                                            private_endpoint_connection_name, location=None,
                                                            tags=None, sku=None, type_=None,
                                                            user_assigned_identities=None,
                                                            private_link_service_connection_state=None):
    """Create or update a private endpoint connection of a workspace."""
    properties = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {'type': type_, 'user_assigned_identities': user_assigned_identities},
        'private_link_service_connection_state': private_link_service_connection_state,
    }
    return client.put(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      private_endpoint_connection_name=private_endpoint_connection_name,
                      properties=properties)
+
+
def machinelearningservices_private_link_resource_list(
        client, resource_group_name, workspace_name):
    """List the private link resources exposed by a workspace."""
    return client.list_by_workspace(resource_group_name=resource_group_name,
                                    workspace_name=workspace_name)
+
+
def machinelearningservices_linked_service_list(
        client, resource_group_name, workspace_name):
    """List every linked service attached to a workspace."""
    return client.list(resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_linked_service_show(
        client, resource_group_name, workspace_name, link_name):
    """Fetch one linked service by its link name."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      link_name=link_name)
+
+
def machinelearningservices_linked_service_create(
        client, resource_group_name, workspace_name, link_name,
        name=None, location=None, properties=None, type_=None,
        user_assigned_identities=None):
    """Attach a linked service to a workspace.

    Identity arguments are folded into the request's nested identity object.
    """
    parameters = {
        'name': name,
        'location': location,
        'properties': properties,
        'identity': {'type': type_,
                     'user_assigned_identities': user_assigned_identities},
    }
    return client.create(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         link_name=link_name,
                         parameters=parameters)
+
+
def machinelearningservices_linked_service_delete(
        client, resource_group_name, workspace_name, link_name):
    """Detach a linked service from a workspace."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         link_name=link_name)
+
+
def machinelearningservices_machine_learning_service_list(
        client, resource_group_name, workspace_name, skiptoken=None,
        model_id=None, model_name=None, tag=None, tags=None, properties=None,
        run_id=None, expand=None, orderby=None):
    """List service deployments in a workspace with optional server-side filters."""
    filters = dict(skiptoken=skiptoken, model_id=model_id, model_name=model_name,
                   tag=tag, tags=tags, properties=properties, run_id=run_id,
                   expand=expand, orderby=orderby)
    return client.list_by_workspace(resource_group_name=resource_group_name,
                                    workspace_name=workspace_name, **filters)
+
+
def machinelearningservices_machine_learning_service_show(
        client, resource_group_name, workspace_name, service_name, expand=None):
    """Fetch a service deployment; expand defaults to False when unset."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      service_name=service_name,
                      expand=False if expand is None else expand)
+
+
def machinelearningservices_machine_learning_service_create(
        client, resource_group_name, workspace_name, service_name,
        properties, no_wait=False):
    """Create a service deployment (long-running; honours --no-wait)."""
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       service_name=service_name,
                       properties=properties)
+
+
def machinelearningservices_machine_learning_service_update(
        client, resource_group_name, workspace_name, service_name,
        properties, no_wait=False):
    """Update a service deployment; the API uses the same PUT as create."""
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       service_name=service_name,
                       properties=properties)
+
+
def machinelearningservices_machine_learning_service_delete(
        client, resource_group_name, workspace_name, service_name):
    """Delete a service deployment from a workspace."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         service_name=service_name)
+
+
def machinelearningservices_notebook_list_key(
        client, resource_group_name, workspace_name):
    """List the notebook access keys for a workspace."""
    return client.list_keys(resource_group_name=resource_group_name,
                            workspace_name=workspace_name)
+
+
def machinelearningservices_notebook_prepare(
        client, resource_group_name, workspace_name):
    """Start the long-running notebook-preparation operation for a workspace."""
    return client.begin_prepare(resource_group_name=resource_group_name,
                                workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_connection_list(
        client, resource_group_name, workspace_name, target=None, category=None):
    """List workspace connections, optionally filtered by target and category."""
    return client.list(resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       target=target,
                       category=category)
+
+
def machinelearningservices_workspace_connection_show(
        client, resource_group_name, workspace_name, connection_name):
    """Fetch one workspace connection by name."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      connection_name=connection_name)
+
+
def machinelearningservices_workspace_connection_create(
        client, resource_group_name, workspace_name, connection_name,
        name=None, category=None, target=None, auth_type=None, value=None):
    """Create a workspace connection from flattened CLI arguments."""
    parameters = {
        'name': name,
        'category': category,
        'target': target,
        'auth_type': auth_type,
        'value': value,
    }
    return client.create(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         connection_name=connection_name,
                         parameters=parameters)
+
+
def machinelearningservices_workspace_connection_delete(
        client, resource_group_name, workspace_name, connection_name):
    """Delete a workspace connection by name."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         connection_name=connection_name)
+
+
def machinelearningservices_code_container_list(
        client, resource_group_name, workspace_name, skiptoken=None):
    """List code containers in a workspace; skiptoken continues a prior page."""
    return client.list(skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_code_container_show(
        client, name, resource_group_name, workspace_name):
    """Fetch one code container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_code_container_create(
        client, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Create or update a code container."""
    body = {'properties': properties, 'tags': tags, 'description': description}
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_code_container_update(
        instance, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Copy each supplied (non-None) field onto the fetched object and return it."""
    for attr, value in (('properties', properties), ('tags', tags),
                        ('description', description)):
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_code_container_delete(
        client, name, resource_group_name, workspace_name):
    """Delete a code container by name."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_list(
        client, name, resource_group_name, workspace_name,
        order_by=None, top=None, skiptoken=None):
    """List the versions of one code container, with paging/sort options."""
    return client.list(name=name, order_by=order_by, top=top,
                       skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_show(
        client, name, version, resource_group_name, workspace_name):
    """Fetch one code version by container name and version string."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_create(
        client, name, version, resource_group_name, workspace_name,
        datastore_id=None, asset_path=None, path=None, generated_by=None,
        description=None, tags=None, properties=None):
    """Create or update one version of a code asset."""
    body = {
        'datastore_id': datastore_id,
        'asset_path': asset_path,
        'path': path,
        'generated_by': generated_by,
        'description': description,
        'tags': tags,
        'properties': properties,
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_code_version_update(
        instance, name, version, resource_group_name, workspace_name,
        datastore_id=None, asset_path=None, path=None, generated_by=None,
        description=None, tags=None, properties=None):
    """Copy each supplied (non-None) field onto the fetched version and return it."""
    updates = (('datastore_id', datastore_id), ('asset_path', asset_path),
               ('path', path), ('generated_by', generated_by),
               ('description', description), ('tags', tags),
               ('properties', properties))
    for attr, value in updates:
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_code_version_delete(
        client, name, version, resource_group_name, workspace_name):
    """Delete one version of a code asset."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_component_container_list(
        client, resource_group_name, workspace_name, skiptoken=None):
    """List component containers in a workspace."""
    return client.list(skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_component_container_show(
        client, name, resource_group_name, workspace_name):
    """Fetch one component container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_component_container_create(
        client, name, resource_group_name, workspace_name,
        description=None, tags=None, properties=None):
    """Create or update a component container."""
    body = {'description': description, 'tags': tags, 'properties': properties}
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_component_container_update(
        instance, name, resource_group_name, workspace_name,
        description=None, tags=None, properties=None):
    """Copy each supplied (non-None) field onto the fetched object and return it."""
    for attr, value in (('description', description), ('tags', tags),
                        ('properties', properties)):
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_component_container_delete(
        client, name, resource_group_name, workspace_name):
    """Delete a component container by name."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_component_version_list(
        client, name, resource_group_name, workspace_name,
        order_by=None, top=None, skiptoken=None):
    """List the versions of one component, with paging/sort options."""
    return client.list(name=name, order_by=order_by, top=top,
                       skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_component_version_show(
        client, name, version, resource_group_name, workspace_name):
    """Fetch one component version."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_component_version_create(
        client, name, version, resource_group_name, workspace_name,
        code_configuration, environment_id=None, generated_by=None,
        description=None, tags=None, properties=None, display_name=None,
        is_deterministic=None, inputs=None, outputs=None):
    """Create or update a component version.

    The nested ``component`` section is always typed "CommandComponent" —
    the only component flavour this generated layer supports.
    """
    component = {
        'component_type': "CommandComponent",
        'display_name': display_name,
        'is_deterministic': is_deterministic,
        'inputs': inputs,
        'outputs': outputs,
    }
    body = {
        'environment_id': environment_id,
        'code_configuration': code_configuration,
        'generated_by': generated_by,
        'description': description,
        'tags': tags,
        'properties': properties,
        'component': component,
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_component_version_update(instance,
                                                     name,
                                                     version,
                                                     resource_group_name,
                                                     workspace_name,
                                                     code_configuration,
                                                     environment_id=None,
                                                     generated_by=None,
                                                     description=None,
                                                     tags=None,
                                                     properties=None,
                                                     display_name=None,
                                                     is_deterministic=None,
                                                     inputs=None,
                                                     outputs=None):
    """Patch a fetched component version in place and return it.

    Only non-None arguments are written onto ``instance``; the identifying
    parameters (name/version/resource group/workspace) are consumed by the
    generic update pipeline, not used here.
    """
    if environment_id is not None:
        instance.environment_id = environment_id
    if code_configuration is not None:
        instance.code_configuration = code_configuration
    if generated_by is not None:
        instance.generated_by = generated_by
    if description is not None:
        instance.description = description
    if tags is not None:
        instance.tags = tags
    if properties is not None:
        instance.properties = properties
    # Fix: the generated guard `if "CommandComponent" is not None:` compared a
    # string literal with `is` — always true and a SyntaxWarning on Python 3.8+.
    # The component type is a constant discriminator, so assign unconditionally.
    instance.component.component_type = "CommandComponent"
    if display_name is not None:
        instance.component.display_name = display_name
    if is_deterministic is not None:
        instance.component.is_deterministic = is_deterministic
    if inputs is not None:
        instance.component.inputs = inputs
    if outputs is not None:
        instance.component.outputs = outputs
    return instance
+
+
def machinelearningservices_component_version_delete(
        client, name, version, resource_group_name, workspace_name):
    """Delete one component version."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_list(
        client, resource_group_name, workspace_name, skiptoken=None):
    """List data containers in a workspace."""
    return client.list(skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_show(
        client, name, resource_group_name, workspace_name):
    """Fetch one data container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_create(
        client, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Create or update a data container."""
    body = {'properties': properties, 'tags': tags, 'description': description}
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_data_container_update(
        instance, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Copy each supplied (non-None) field onto the fetched object and return it."""
    for attr, value in (('properties', properties), ('tags', tags),
                        ('description', description)):
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_data_container_delete(
        client, name, resource_group_name, workspace_name):
    """Delete a data container by name."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_list(
        client, resource_group_name, workspace_name, skiptoken=None,
        count=None, is_default=None, names=None, search_text=None,
        order_by=None, order_by_asc=None):
    """List datastores; page size defaults to 30, ascending sort defaults off."""
    count = 30 if count is None else count
    order_by_asc = False if order_by_asc is None else order_by_asc
    return client.list(skiptoken=skiptoken, count=count, is_default=is_default,
                       names=names, search_text=search_text, order_by=order_by,
                       order_by_asc=order_by_asc,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_show(
        client, name, resource_group_name, workspace_name):
    """Fetch one datastore by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_create(
        client, name, resource_group_name, workspace_name,
        datastore_contents_type, is_default=None, linked_info=None,
        properties=None, description=None, tags=None, azure_data_lake=None,
        azure_my_sql=None, azure_postgre_sql=None, azure_sql_database=None,
        azure_storage=None, gluster_fs=None):
    """Create or update a datastore.

    The ``contents`` section is polymorphic: ``datastore_contents_type``
    discriminates which of the storage-flavour arguments applies.
    """
    contents = {
        'datastore_contents_type': datastore_contents_type,
        'azure_data_lake': azure_data_lake,
        'azure_my_sql': azure_my_sql,
        'azure_postgre_sql': azure_postgre_sql,
        'azure_sql_database': azure_sql_database,
        'azure_storage': azure_storage,
        'gluster_fs': gluster_fs,
    }
    body = {
        'is_default': is_default,
        'linked_info': linked_info,
        'properties': properties,
        'description': description,
        'tags': tags,
        'contents': contents,
    }
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_datastore_update(
        instance, name, resource_group_name, workspace_name,
        datastore_contents_type, is_default=None, linked_info=None,
        properties=None, description=None, tags=None, azure_data_lake=None,
        azure_my_sql=None, azure_postgre_sql=None, azure_sql_database=None,
        azure_storage=None, gluster_fs=None):
    """Copy each supplied (non-None) field onto the fetched datastore.

    Top-level fields land on the instance itself; storage-flavour fields land
    on its nested ``contents`` object.
    """
    top_level = (('is_default', is_default), ('linked_info', linked_info),
                 ('properties', properties), ('description', description),
                 ('tags', tags))
    for attr, value in top_level:
        if value is not None:
            setattr(instance, attr, value)
    nested = (('datastore_contents_type', datastore_contents_type),
              ('azure_data_lake', azure_data_lake),
              ('azure_my_sql', azure_my_sql),
              ('azure_postgre_sql', azure_postgre_sql),
              ('azure_sql_database', azure_sql_database),
              ('azure_storage', azure_storage),
              ('gluster_fs', gluster_fs))
    for attr, value in nested:
        if value is not None:
            setattr(instance.contents, attr, value)
    return instance
+
+
def machinelearningservices_datastore_delete(
        client, name, resource_group_name, workspace_name):
    """Delete a datastore by name."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_list_secret(
        client, name, resource_group_name, workspace_name):
    """List the stored secrets/credentials of a datastore."""
    return client.list_secrets(name=name,
                               resource_group_name=resource_group_name,
                               workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_list(
        client, name, resource_group_name, workspace_name,
        order_by=None, top=None, skiptoken=None):
    """List the versions of one data asset, with paging/sort options."""
    return client.list(name=name, order_by=order_by, top=top,
                       skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_show(
        client, name, version, resource_group_name, workspace_name):
    """Fetch one data asset version."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_create(
        client, name, version, resource_group_name, workspace_name,
        dataset_type=None, datastore_id=None, asset_path=None, path=None,
        generated_by=None, description=None, tags=None, properties=None):
    """Create or update one version of a data asset."""
    body = {
        'dataset_type': dataset_type,
        'datastore_id': datastore_id,
        'asset_path': asset_path,
        'path': path,
        'generated_by': generated_by,
        'description': description,
        'tags': tags,
        'properties': properties,
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_data_version_update(
        instance, name, version, resource_group_name, workspace_name,
        dataset_type=None, datastore_id=None, asset_path=None, path=None,
        generated_by=None, description=None, tags=None, properties=None):
    """Copy each supplied (non-None) field onto the fetched version and return it."""
    updates = (('dataset_type', dataset_type), ('datastore_id', datastore_id),
               ('asset_path', asset_path), ('path', path),
               ('generated_by', generated_by), ('description', description),
               ('tags', tags), ('properties', properties))
    for attr, value in updates:
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_data_version_delete(
        client, name, version, resource_group_name, workspace_name):
    """Delete one data asset version."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_list(
        client, resource_group_name, workspace_name, skiptoken=None):
    """List environment containers in a workspace."""
    return client.list(skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_show(
        client, name, resource_group_name, workspace_name):
    """Fetch one environment container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_create(
        client, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Create or update an environment container."""
    body = {'properties': properties, 'tags': tags, 'description': description}
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_environment_container_update(
        instance, name, resource_group_name, workspace_name,
        properties=None, tags=None, description=None):
    """Copy each supplied (non-None) field onto the fetched object and return it."""
    for attr, value in (('properties', properties), ('tags', tags),
                        ('description', description)):
        if value is not None:
            setattr(instance, attr, value)
    return instance
+
+
def machinelearningservices_environment_container_delete(
        client, name, resource_group_name, workspace_name):
    """Delete an environment container by name."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_list(
        client, name, resource_group_name, workspace_name,
        order_by=None, top=None, skiptoken=None):
    """List the versions of one environment specification."""
    return client.list(name=name, order_by=order_by, top=top,
                       skiptoken=skiptoken,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_show(
        client, name, version, resource_group_name, workspace_name):
    """Fetch one environment specification version."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_create(
        client, name, version, resource_group_name, workspace_name,
        docker_image=None, docker_build=None, conda_file=None,
        generated_by=None, description=None, tags=None, properties=None,
        liveness_route=None, readiness_route=None, scoring_route=None):
    """Create or update an environment specification version.

    ``docker_image`` and ``docker_build`` are mutually exclusive flattened
    variants of the polymorphic ``docker`` section.
    """
    if docker_image is not None and docker_build is not None:
        raise CLIError('at most one of docker_image, docker_build is needed for docker!')
    docker = docker_image if docker_image is not None else docker_build
    body = {
        'docker': docker,
        'conda_file': conda_file,
        'generated_by': generated_by,
        'description': description,
        'tags': tags,
        'properties': properties,
        'inference_container_properties': {
            'liveness_route': liveness_route,
            'readiness_route': readiness_route,
            'scoring_route': scoring_route,
        },
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_environment_specification_version_update(
        instance, name, version, resource_group_name, workspace_name,
        docker_image=None, docker_build=None, conda_file=None,
        generated_by=None, description=None, tags=None, properties=None,
        liveness_route=None, readiness_route=None, scoring_route=None):
    """Copy each supplied (non-None) field onto the fetched specification.

    Route fields land on the nested ``inference_container_properties`` object;
    ``docker_image``/``docker_build`` are mutually exclusive.
    """
    if docker_image is not None and docker_build is not None:
        raise CLIError('at most one of docker_image, docker_build is needed for docker!')
    docker = docker_image if docker_image is not None else docker_build
    top_level = (('docker', docker), ('conda_file', conda_file),
                 ('generated_by', generated_by), ('description', description),
                 ('tags', tags), ('properties', properties))
    for attr, value in top_level:
        if value is not None:
            setattr(instance, attr, value)
    routes = (('liveness_route', liveness_route),
              ('readiness_route', readiness_route),
              ('scoring_route', scoring_route))
    for attr, value in routes:
        if value is not None:
            setattr(instance.inference_container_properties, attr, value)
    return instance
+
+
def machinelearningservices_environment_specification_version_delete(
        client, name, version, resource_group_name, workspace_name):
    """Delete one environment specification version."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_job_list(
        client, resource_group_name, workspace_name,
        skiptoken=None, job_type=None, tags=None, tag=None):
    """List jobs in a workspace with optional type/tag filters."""
    return client.list(skiptoken=skiptoken, job_type=job_type,
                       tags=tags, tag=tag,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_job_show(
        client, id_, resource_group_name, workspace_name):
    """Fetch one job; the SDK keyword is `id`, the CLI arg is `id_`."""
    return client.get(id=id_,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_job_create(
        client, id_, resource_group_name, workspace_name, properties):
    """Create or update a job; the API body wraps the properties blob."""
    return client.create_or_update(id=id_,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body={'properties': properties})
+
+
def machinelearningservices_job_update(
        instance, id_, resource_group_name, workspace_name, properties):
    """Overwrite the fetched job's properties (when provided) and return it."""
    if properties is not None:
        instance.properties = properties
    return instance
+
+
def machinelearningservices_job_delete(
        client, id_, resource_group_name, workspace_name, no_wait=False):
    """Delete a job (long-running; honours --no-wait)."""
    return sdk_no_wait(no_wait,
                       client.begin_delete,
                       id=id_,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_job_cancel(
        client, id_, resource_group_name, workspace_name):
    """Request cancellation of a running job."""
    return client.cancel(id=id_,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_labeling_job_list(
        client, resource_group_name, workspace_name,
        skiptoken=None, count=None):
    """List labeling jobs in a workspace."""
    return client.list(skiptoken=skiptoken, count=count,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_labeling_job_show(
        client, id_, resource_group_name, workspace_name,
        include_job_instructions=None, include_label_categories=None):
    """Fetch one labeling job, optionally expanding instructions/categories."""
    return client.get(id=id_,
                      include_job_instructions=include_job_instructions,
                      include_label_categories=include_label_categories,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_labeling_job_create(
        client, id_, resource_group_name, workspace_name, description=None,
        tags=None, properties=None, label_categories=None,
        dataset_configuration=None, labeling_job_image_properties=None,
        labeling_job_text_properties=None, inferencing_compute_binding=None,
        training_compute_binding=None, ml_assist_enabled=None, uri=None,
        no_wait=False):
    """Create or update a labeling job (long-running; honours --no-wait).

    Image and text media properties are mutually exclusive flattened variants
    of the polymorphic media-properties section.
    """
    if labeling_job_image_properties is not None and labeling_job_text_properties is not None:
        raise CLIError('at most one of labeling_job_image_properties, labeling_job_text_properties is needed for '
                       'labeling_job_media_properties!')
    media = (labeling_job_image_properties
             if labeling_job_image_properties is not None
             else labeling_job_text_properties)
    body = {
        'description': description,
        'tags': tags,
        'properties': properties,
        'label_categories': label_categories,
        'dataset_configuration': dataset_configuration,
        'labeling_job_media_properties': media,
        'ml_assist_configuration': {
            'inferencing_compute_binding': inferencing_compute_binding,
            'training_compute_binding': training_compute_binding,
            'ml_assist_enabled': ml_assist_enabled,
        },
        'job_instructions': {'uri': uri},
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       id=id_,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_labeling_job_update(
        instance, id_, resource_group_name, workspace_name, description=None,
        tags=None, properties=None, label_categories=None,
        dataset_configuration=None, labeling_job_image_properties=None,
        labeling_job_text_properties=None, inferencing_compute_binding=None,
        training_compute_binding=None, ml_assist_enabled=None, uri=None,
        no_wait=False):
    """Copy each supplied (non-None) field onto the fetched labeling job.

    Image and text media properties are mutually exclusive; ML-assist fields
    land on the nested ``ml_assist_configuration`` object.
    """
    if labeling_job_image_properties is not None and labeling_job_text_properties is not None:
        raise CLIError('at most one of labeling_job_image_properties, labeling_job_text_properties is needed for '
                       'labeling_job_media_properties!')
    media = (labeling_job_image_properties
             if labeling_job_image_properties is not None
             else labeling_job_text_properties)
    top_level = (('description', description), ('tags', tags),
                 ('properties', properties),
                 ('label_categories', label_categories),
                 ('dataset_configuration', dataset_configuration),
                 ('labeling_job_media_properties', media))
    for attr, value in top_level:
        if value is not None:
            setattr(instance, attr, value)
    assist = (('inferencing_compute_binding', inferencing_compute_binding),
              ('training_compute_binding', training_compute_binding),
              ('ml_assist_enabled', ml_assist_enabled))
    for attr, value in assist:
        if value is not None:
            setattr(instance.ml_assist_configuration, attr, value)
    if uri is not None:
        instance.job_instructions.uri = uri
    return instance
+
+
def machinelearningservices_labeling_job_delete(
        client, id_, resource_group_name, workspace_name):
    """Delete a labeling job by id."""
    return client.delete(id=id_,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
+def machinelearningservices_labeling_job_export_label(
+        client,
+        id_,
+        resource_group_name,
+        workspace_name,
+        coco_export_summary=None,
+        csv_export_summary=None,
+        dataset_export_summary=None,
+        no_wait=False):
+    """Start a long-running export of labels for a labeling job.
+
+    Exactly one export-summary flavor (coco/csv/dataset) must be supplied;
+    it becomes the polymorphic request body.
+    """
+    all_body = []
+    if coco_export_summary is not None:
+        all_body.append(coco_export_summary)
+    if csv_export_summary is not None:
+        all_body.append(csv_export_summary)
+    if dataset_export_summary is not None:
+        all_body.append(dataset_export_summary)
+    if len(all_body) > 1:
+        raise CLIError('at most one of coco_export_summary, csv_export_summary, dataset_export_summary is needed for '
+                       'body!')
+    if len(all_body) != 1:
+        # Reached only when zero flavors were given (>1 raised above).
+        raise CLIError('body is required. but none of coco_export_summary, csv_export_summary, dataset_export_summary '
+                       'is provided!')
+    # At this point len(all_body) == 1 is guaranteed; the conditional is a
+    # generated-code belt-and-braces.
+    body = all_body[0] if len(all_body) == 1 else None
+    return sdk_no_wait(
+        no_wait,
+        client.begin_export_labels,
+        id=id_,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_labeling_job_pause(
+        client,
+        id_,
+        resource_group_name,
+        workspace_name):
+    """Pause the labeling job identified by ``id_``."""
+    return client.pause(
+        id=id_,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_labeling_job_resume(
+        client,
+        id_,
+        resource_group_name,
+        workspace_name,
+        no_wait=False):
+    """Resume a paused labeling job (long-running; honors --no-wait)."""
+    return sdk_no_wait(
+        no_wait,
+        client.begin_resume,
+        id=id_,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_container_list(
+        client,
+        resource_group_name,
+        workspace_name,
+        skiptoken=None,
+        count=None):
+    """List model containers in the workspace (server-side paging via skiptoken)."""
+    return client.list(
+        skiptoken=skiptoken,
+        count=count,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_container_show(
+        client,
+        name,
+        resource_group_name,
+        workspace_name):
+    """Get a single model container by name."""
+    return client.get(
+        name=name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_container_create(
+        client,
+        name,
+        resource_group_name,
+        workspace_name,
+        description=None,
+        tags=None,
+        properties=None):
+    """Create or update a model container.
+
+    Unset options are sent as None in the body — presumably dropped by the
+    SDK serializer; confirm against the generated client.
+    """
+    body = {}
+    body['description'] = description
+    body['tags'] = tags
+    body['properties'] = properties
+    return client.create_or_update(
+        name=name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_model_container_update(
+        instance,
+        name,
+        resource_group_name,
+        workspace_name,
+        description=None,
+        tags=None,
+        properties=None):
+    """Patch a model container in place; only provided fields are modified.
+
+    ``name``/``resource_group_name``/``workspace_name`` are unused here —
+    presumably consumed by the CLI generic update machinery.
+    """
+    if description is not None:
+        instance.description = description
+    if tags is not None:
+        instance.tags = tags
+    if properties is not None:
+        instance.properties = properties
+    return instance
+
+
+def machinelearningservices_model_container_delete(
+        client,
+        name,
+        resource_group_name,
+        workspace_name):
+    """Delete the named model container."""
+    return client.delete(
+        name=name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_version_list(
+        client,
+        name,
+        resource_group_name,
+        workspace_name,
+        skiptoken=None,
+        order_by=None,
+        top=None,
+        version=None,
+        description=None,
+        offset=None,
+        tags=None,
+        properties=None):
+    """List versions of a model container, with optional server-side
+    filtering (version/description/tags/properties), ordering and paging."""
+    return client.list(
+        name=name,
+        skiptoken=skiptoken,
+        order_by=order_by,
+        top=top,
+        version=version,
+        description=description,
+        offset=offset,
+        tags=tags,
+        properties=properties,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_version_show(
+        client,
+        name,
+        version,
+        resource_group_name,
+        workspace_name):
+    """Get one specific version of a model."""
+    return client.get(
+        name=name,
+        version=version,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_model_version_create(
+        client,
+        name,
+        version,
+        resource_group_name,
+        workspace_name,
+        stage=None,
+        flavors=None,
+        datastore_id=None,
+        asset_path=None,
+        path=None,
+        generated_by=None,
+        description=None,
+        tags=None,
+        properties=None):
+    """Create or update a model version.
+
+    All optional fields are forwarded verbatim; None values are presumably
+    dropped by the SDK serializer — confirm against the generated client.
+    """
+    body = {}
+    body['stage'] = stage
+    body['flavors'] = flavors
+    body['datastore_id'] = datastore_id
+    body['asset_path'] = asset_path
+    body['path'] = path
+    body['generated_by'] = generated_by
+    body['description'] = description
+    body['tags'] = tags
+    body['properties'] = properties
+    return client.create_or_update(
+        name=name,
+        version=version,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_model_version_update(
+        instance,
+        name,
+        version,
+        resource_group_name,
+        workspace_name,
+        stage=None,
+        flavors=None,
+        datastore_id=None,
+        asset_path=None,
+        path=None,
+        generated_by=None,
+        description=None,
+        tags=None,
+        properties=None):
+    """Patch a model version in place; only provided fields are modified.
+
+    Identifier parameters are unused here — presumably consumed by the CLI
+    generic update machinery.
+    """
+    if stage is not None:
+        instance.stage = stage
+    if flavors is not None:
+        instance.flavors = flavors
+    if datastore_id is not None:
+        instance.datastore_id = datastore_id
+    if asset_path is not None:
+        instance.asset_path = asset_path
+    if path is not None:
+        instance.path = path
+    if generated_by is not None:
+        instance.generated_by = generated_by
+    if description is not None:
+        instance.description = description
+    if tags is not None:
+        instance.tags = tags
+    if properties is not None:
+        instance.properties = properties
+    return instance
+
+
+def machinelearningservices_model_version_delete(
+        client,
+        name,
+        version,
+        resource_group_name,
+        workspace_name):
+    """Delete one specific version of a model."""
+    return client.delete(
+        name=name,
+        version=version,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_deployment_list(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name,
+        order_by=None,
+        top=None,
+        skiptoken=None):
+    """List deployments under an online endpoint."""
+    return client.list(
+        endpoint_name=endpoint_name,
+        order_by=order_by,
+        top=top,
+        skiptoken=skiptoken,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_deployment_show(
+        client,
+        endpoint_name,
+        deployment_name,
+        resource_group_name,
+        workspace_name):
+    """Get a single deployment of an online endpoint."""
+    return client.get(
+        endpoint_name=endpoint_name,
+        deployment_name=deployment_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_deployment_create(
+        client,
+        endpoint_name,
+        deployment_name,
+        resource_group_name,
+        workspace_name,
+        location,
+        deployment_configuration,
+        tags=None,
+        kind=None,
+        scale_settings=None,
+        description=None,
+        properties=None,
+        id_asset_reference=None,
+        data_path_asset_reference=None,
+        output_path_asset_reference=None,
+        code_configuration=None,
+        environment_id=None,
+        environment_variables=None,
+        type_=None,
+        user_assigned_identities=None,
+        no_wait=False):
+    """Create or update an online deployment (long-running; honors --no-wait).
+
+    Exactly one model-reference flavor (id/data-path/output-path) is
+    required; it becomes the polymorphic ``model_reference`` body field.
+    ``type_``/``user_assigned_identities`` populate the managed identity.
+    """
+    all_model_reference = []
+    if id_asset_reference is not None:
+        all_model_reference.append(id_asset_reference)
+    if data_path_asset_reference is not None:
+        all_model_reference.append(data_path_asset_reference)
+    if output_path_asset_reference is not None:
+        all_model_reference.append(output_path_asset_reference)
+    if len(all_model_reference) > 1:
+        raise CLIError('at most one of id_asset_reference, data_path_asset_reference, output_path_asset_reference is '
+                       'needed for model_reference!')
+    if len(all_model_reference) != 1:
+        # Reached only when zero flavors were given (>1 raised above).
+        raise CLIError('model_reference is required. but none of id_asset_reference, data_path_asset_reference, '
+                       'output_path_asset_reference is provided!')
+    model_reference = all_model_reference[0] if len(all_model_reference) == 1 else None
+    body = {}
+    body['tags'] = tags
+    body['location'] = location
+    body['kind'] = kind
+    body['scale_settings'] = scale_settings
+    body['deployment_configuration'] = deployment_configuration
+    body['description'] = description
+    body['properties'] = properties
+    body['model_reference'] = model_reference
+    body['code_configuration'] = code_configuration
+    body['environment_id'] = environment_id
+    body['environment_variables'] = environment_variables
+    body['identity'] = {}
+    body['identity']['type'] = type_
+    body['identity']['user_assigned_identities'] = user_assigned_identities
+    return sdk_no_wait(
+        no_wait,
+        client.begin_create_or_update,
+        endpoint_name=endpoint_name,
+        deployment_name=deployment_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_online_deployment_update(
+        client,
+        endpoint_name,
+        deployment_name,
+        resource_group_name,
+        workspace_name,
+        tags=None,
+        location=None,
+        kind=None,
+        scale_settings=None,
+        deployment_configuration=None,
+        type_=None,
+        user_assigned_identities=None,
+        no_wait=False):
+    """Patch an online deployment (long-running; honors --no-wait).
+
+    Unlike the generic-update commands this builds a PATCH body directly;
+    scale settings and deployment configuration nest under ``properties``.
+    """
+    body = {}
+    body['tags'] = tags
+    body['location'] = location
+    body['kind'] = kind
+    body['properties'] = {}
+    body['properties']['scale_settings'] = scale_settings
+    body['properties']['deployment_configuration'] = deployment_configuration
+    body['identity'] = {}
+    body['identity']['type'] = type_
+    body['identity']['user_assigned_identities'] = user_assigned_identities
+    return sdk_no_wait(
+        no_wait,
+        client.begin_update,
+        endpoint_name=endpoint_name,
+        deployment_name=deployment_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_online_deployment_delete(
+        client,
+        endpoint_name,
+        deployment_name,
+        resource_group_name,
+        workspace_name,
+        no_wait=False):
+    """Delete an online deployment (long-running; honors --no-wait)."""
+    return sdk_no_wait(
+        no_wait,
+        client.begin_delete,
+        endpoint_name=endpoint_name,
+        deployment_name=deployment_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_deployment_get_log(
+        client,
+        endpoint_name,
+        deployment_name,
+        resource_group_name,
+        workspace_name,
+        container_type=None,
+        tail=None):
+    """Fetch container logs for a deployment (optionally only the last
+    ``tail`` lines from the given container type)."""
+    body = {}
+    body['container_type'] = container_type
+    body['tail'] = tail
+    return client.get_logs(
+        endpoint_name=endpoint_name,
+        deployment_name=deployment_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_online_endpoint_list(
+        client,
+        resource_group_name,
+        workspace_name,
+        name=None,
+        count=None,
+        compute_type=None,
+        skiptoken=None,
+        tags=None,
+        properties=None,
+        order_by=None):
+    """List online endpoints in the workspace with optional server-side
+    filters (name/compute type/tags/properties), ordering and paging."""
+    return client.list(
+        name=name,
+        count=count,
+        compute_type=compute_type,
+        skiptoken=skiptoken,
+        tags=tags,
+        properties=properties,
+        order_by=order_by,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_endpoint_show(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name):
+    """Get a single online endpoint by name."""
+    return client.get(
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_endpoint_create(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name,
+        location,
+        auth_mode,
+        tags=None,
+        kind=None,
+        description=None,
+        properties=None,
+        traffic_rules=None,
+        aks_compute_configuration=None,
+        managed_compute_configuration=None,
+        azure_ml_compute_configuration=None,
+        type_=None,
+        user_assigned_identities=None,
+        no_wait=False):
+    """Create or update an online endpoint (long-running; honors --no-wait).
+
+    Exactly one compute-configuration flavor (AKS/managed/AzureML) is
+    required; it becomes the polymorphic ``compute_configuration`` field.
+    """
+    all_compute_configuration = []
+    if aks_compute_configuration is not None:
+        all_compute_configuration.append(aks_compute_configuration)
+    if managed_compute_configuration is not None:
+        all_compute_configuration.append(managed_compute_configuration)
+    if azure_ml_compute_configuration is not None:
+        all_compute_configuration.append(azure_ml_compute_configuration)
+    if len(all_compute_configuration) > 1:
+        raise CLIError('at most one of aks_compute_configuration, managed_compute_configuration, '
+                       'azure_ml_compute_configuration is needed for compute_configuration!')
+    if len(all_compute_configuration) != 1:
+        # Reached only when zero flavors were given (>1 raised above).
+        raise CLIError('compute_configuration is required. but none of aks_compute_configuration, '
+                       'managed_compute_configuration, azure_ml_compute_configuration is provided!')
+    compute_configuration = all_compute_configuration[0] if len(all_compute_configuration) == 1 else None
+    body = {}
+    body['tags'] = tags
+    body['location'] = location
+    body['kind'] = kind
+    body['description'] = description
+    body['properties'] = properties
+    body['traffic_rules'] = traffic_rules
+    body['compute_configuration'] = compute_configuration
+    body['auth_mode'] = auth_mode
+    body['identity'] = {}
+    body['identity']['type'] = type_
+    body['identity']['user_assigned_identities'] = user_assigned_identities
+    return sdk_no_wait(
+        no_wait,
+        client.begin_create_or_update,
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_online_endpoint_update(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name,
+        tags=None,
+        location=None,
+        kind=None,
+        traffic_rules=None,
+        type_=None,
+        user_assigned_identities=None,
+        no_wait=False):
+    """Patch an online endpoint (long-running; honors --no-wait).
+
+    Builds a PATCH body directly; traffic rules nest under ``properties``.
+    """
+    body = {}
+    body['tags'] = tags
+    body['location'] = location
+    body['kind'] = kind
+    body['properties'] = {}
+    body['properties']['traffic_rules'] = traffic_rules
+    body['identity'] = {}
+    body['identity']['type'] = type_
+    body['identity']['user_assigned_identities'] = user_assigned_identities
+    return sdk_no_wait(
+        no_wait,
+        client.begin_update,
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
+
+
+def machinelearningservices_online_endpoint_delete(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name,
+        no_wait=False):
+    """Delete an online endpoint (long-running; honors --no-wait)."""
+    return sdk_no_wait(
+        no_wait,
+        client.begin_delete,
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_endpoint_get_token(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name):
+    """Retrieve the auth token for an online endpoint."""
+    return client.get_token(
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_endpoint_list_key(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name):
+    """List the auth keys of an online endpoint."""
+    return client.list_keys(
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name)
+
+
+def machinelearningservices_online_endpoint_regenerate_key(
+        client,
+        endpoint_name,
+        resource_group_name,
+        workspace_name,
+        key_type,
+        key_value=None,
+        no_wait=False):
+    """Regenerate an endpoint auth key (long-running; honors --no-wait).
+
+    ``key_type`` selects which key to regenerate; ``key_value`` optionally
+    supplies the new value.
+    """
+    body = {}
+    body['key_type'] = key_type
+    body['key_value'] = key_value
+    return sdk_no_wait(
+        no_wait,
+        client.begin_regenerate_keys,
+        endpoint_name=endpoint_name,
+        resource_group_name=resource_group_name,
+        workspace_name=workspace_name,
+        body=body)
diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
new file mode 100644
index 00000000000..70488e93851
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+# Module-level state shared by the try_manual/calc_coverage/raise_if helpers.
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+# Step failures cached here when TEST_EXCEPTION_CACHE=true (see try_manual).
+exceptions = []
+# Per-step result/timing records keyed by step function name.
+test_map = dict()
+# NOTE(review): "successed" is a typo for "succeeded", but the string is
+# written into coverage reports, so it is deliberately left unchanged.
+SUCCESSED = "successed"
+FAILED = "failed"
+
+
+def try_manual(func):
+ def import_manual_function(origin_func):
+ from importlib import import_module
+ decorated_path = inspect.getfile(origin_func).lower()
+ module_path = __path__[0].lower()
+ if not decorated_path.startswith(module_path):
+ raise Exception("Decorator can only be used in submodules!")
+ manual_path = os.path.join(
+ decorated_path[module_path.rfind(os.path.sep) + 1:])
+ manual_file_path, manual_file_name = os.path.split(manual_path)
+ module_name, _ = os.path.splitext(manual_file_name)
+ manual_module = "..manual." + \
+ ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+ return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+ def get_func_to_call():
+ func_to_call = func
+ try:
+ func_to_call = import_manual_function(func)
+ logger.info("Found manual override for %s(...)", func.__name__)
+ except (ImportError, AttributeError):
+ pass
+ return func_to_call
+
+ def wrapper(*args, **kwargs):
+ func_to_call = get_func_to_call()
+ logger.info("running %s()...", func.__name__)
+ try:
+ test_map[func.__name__] = dict()
+ test_map[func.__name__]["result"] = SUCCESSED
+ test_map[func.__name__]["error_message"] = ""
+ test_map[func.__name__]["error_stack"] = ""
+ test_map[func.__name__]["error_normalized"] = ""
+ test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+ ret = func_to_call(*args, **kwargs)
+ except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+ JMESPathCheckAssertionError) as e:
+ use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
+ if use_exception_cache is None or use_exception_cache.lower() != "true":
+ raise
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ test_map[func.__name__]["result"] = FAILED
+ test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+ test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+ "\r\n", " ").replace("\n", " ")[:500]
+ logger.info("--------------------------------------")
+ logger.info("step exception: %s", e)
+ logger.error("--------------------------------------")
+ logger.error("step exception in %s: %s", func.__name__, e)
+ logger.info(traceback.format_exc())
+ exceptions.append((func.__name__, sys.exc_info()))
+ else:
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ return ret
+
+ if inspect.isclass(func):
+ return get_func_to_call()
+ return wrapper
+
+
+def calc_coverage(filename):
+    """Write a markdown coverage table for all recorded step_* results.
+
+    Produces "<filename-stem>_coverage.md" next to the test file, one row
+    per entry in the module-level ``test_map`` whose key starts with
+    "step_", plus a final "Coverage: covered/total" line.
+    """
+    filename = filename.split(".")[0]
+    coverage_name = filename + "_coverage.md"
+    with open(coverage_name, "w") as f:
+        f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+        total = len(test_map)
+        covered = 0
+        for k, v in test_map.items():
+            # Non-step bookkeeping entries don't count toward coverage.
+            if not k.startswith("step_"):
+                total -= 1
+                continue
+            if v["result"] == SUCCESSED:
+                covered += 1
+            f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+                    "{end_dt}|\n".format(step_name=k, **v))
+        f.write("Coverage: {}/{}\n".format(covered, total))
+    print("Create coverage\n", file=sys.stderr)
+
+
+def raise_if():
+    """Re-raise the first cached step exception, if any were recorded.
+
+    When several steps failed (TEST_EXCEPTION_CACHE mode), the first
+    exception's traceback is preserved and the remaining failures are
+    summarized in the message.
+    """
+    if exceptions:
+        if len(exceptions) <= 1:
+            raise exceptions[0][1][1]
+        message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
+        message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
+        # exceptions[i] is (step_name, sys.exc_info()) == (name, (type, value, tb)).
+        raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
new file mode 100644
index 00000000000..5d02706aaf6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
@@ -0,0 +1,1662 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
+
+# EXAMPLE: /Workspaces/put/Create Workspace
+@try_manual
+def step_workspace_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/put/Create Workspace: create, then wait for provisioning."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace create '
+        '--type "SystemAssigned" '
+        '--location "eastus2euap" '
+        '--description "test description" '
+        '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights'
+        '/components/testinsights" '
+        '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR'
+        'egistry/registries/testRegistry" '
+        '--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/'
+        'aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
+        'providers/Microsoft.KeyVault/vaults/testkv" '
+        '--status "Enabled" '
+        '--friendly-name "HelloName" '
+        '--hbi-workspace false '
+        '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes'
+        'tkv" '
+        '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript'
+        'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes'
+        'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" '
+        'status="Approved" '
+        '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto'
+        'rageAccounts/{sa}" '
+        '--sku name="Basic" tier="Basic" '
+        '--resource-group "{rg}" '
+        '--name "{myWorkspace}"',
+        checks=[])
+    # Checks are applied after the wait, once provisioning has completed.
+    test.cmd(
+        'az machinelearningservices workspace wait --created '
+        '--resource-group "{rg}" '
+        '--name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspace
+@try_manual
+def step_workspace_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/get/Get Workspace."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace show '
+        '--resource-group "{rg}" '
+        '--name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group
+@try_manual
+def step_workspace_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/get/Get Workspaces by Resource Group."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace list '
+        '--resource-group "{rg}"',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by subscription
+@try_manual
+def step_workspace_list2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/get/Get Workspaces by subscription (empty -g)."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace list '
+        '-g ""',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/patch/Update Workspace
+@try_manual
+def step_workspace_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/patch/Update Workspace."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace update '
+        '--description "new description" '
+        '--friendly-name "New friendly name" '
+        '--sku name="Enterprise" tier="Enterprise" '
+        '--resource-group "{rg}" '
+        '--name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/List Workspace Keys
+@try_manual
+def step_workspace_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/post/List Workspace Keys."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace list-key '
+        '--resource-group "{rg_3}" '
+        '--name "{myWorkspace2}"',
+        checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Resync Workspace Keys
+@try_manual
+def step_workspace_resync_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /Workspaces/post/Resync Workspace Keys."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices workspace resync-key '
+        '--resource-group "{rg_3}" '
+        '--name "{myWorkspace2}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeContainers/put/CreateOrUpdate Code Container.
+@try_manual
+def step_code_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeContainers/put/CreateOrUpdate Code Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-container create '
+        '--name "{myMachinelearningservice3}" '
+        '--properties description="string" tags={{"tag1":"value1","tag2":"value2"}} '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeContainers/get/Get Code Container.
+@try_manual
+def step_code_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeContainers/get/Get Code Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-container show '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeContainers/get/List Code Container.
+@try_manual
+def step_code_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeContainers/get/List Code Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-container list '
+        '--skiptoken "skiptoken" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeContainers/delete/Delete Code Container.
+@try_manual
+def step_code_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeContainers/delete/Delete Code Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-container delete -y '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeVersions/put/CreateOrUpdate Code Version.
+@try_manual
+def step_code_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeVersions/put/CreateOrUpdate Code Version."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-version create '
+        '--name "{myMachinelearningservice3}" '
+        '--properties description="string" assetPath={{"path":"string","isDirectory":true}} datastoreId="string" '
+        'properties={{"prop1":"value1","prop2":"value2"}} tags={{"tag1":"value1","tag2":"value2"}} '
+        '--resource-group "{rg_3}" '
+        '--version "1" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeVersions/get/Get Code Version.
+@try_manual
+def step_code_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeVersions/get/Get Code Version."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-version show '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--version "1" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeVersions/get/List Code Version.
+@try_manual
+def step_code_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeVersions/get/List Code Version."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-version list '
+        '--name "{myMachinelearningservice3}" '
+        '--skiptoken "skiptoken" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /CodeVersions/delete/Delete Code Version.
+@try_manual
+def step_code_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /CodeVersions/delete/Delete Code Version."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices code-version delete -y '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--version "1" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentContainers/put/CreateOrUpdate Component Container.
+@try_manual
+def step_component_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /ComponentContainers/put/CreateOrUpdate Component Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices component-container create '
+        '--name "{myMachinelearningservice3}" '
+        '--properties description="string" properties={{"additionalProp1":"string","additionalProp2":"string","add'
+        'itionalProp3":"string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"'
+        'string"}} '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentContainers/get/Get Component Container.
+@try_manual
+def step_component_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /ComponentContainers/get/Get Component Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices component-container show '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentContainers/get/List Component Container.
+@try_manual
+def step_component_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /ComponentContainers/get/List Component Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices component-container list '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentContainers/delete/Delete Component Container.
+@try_manual
+def step_component_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /ComponentContainers/delete/Delete Component Container."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices component-container delete -y '
+        '--name "{myMachinelearningservice3}" '
+        '--resource-group "{rg_3}" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentVersions/put/CreateOrUpdate Component Version.
+@try_manual
+def step_component_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Example /ComponentVersions/put/CreateOrUpdate Component Version."""
+    if checks is None:
+        checks = []
+    test.cmd(
+        'az machinelearningservices component-version create '
+        '--name "{myMachinelearningservice3}" '
+        '--properties description="string" codeConfiguration={{"codeArtifactId":"string","command":"string"}} '
+        'component={{"componentType":"CommandComponent","displayName":"string","inputs":{{"additionalProp1":{{"des'
+        'cription":"string","default":"string","componentInputType":"Generic","dataType":"string","optional":true}'
+        '},"additionalProp2":{{"description":"string","default":"string","componentInputType":"Generic","dataType"'
+        ':"string","optional":true}},"additionalProp3":{{"description":"string","default":"string","componentInput'
+        'Type":"Generic","dataType":"string","optional":true}}}},"isDeterministic":true,"outputs":{{"additionalPro'
+        'p1":{{"description":"string","dataType":"string"}},"additionalProp2":{{"description":"string","dataType":'
+        '"string"}},"additionalProp3":{{"description":"string","dataType":"string"}}}}}} '
+        'environmentId="\\"/subscriptions/{{{{subscriptionId}}}}/resourceGroups/{{{{resourceGroup}}}}/providers/Mi'
+        'crosoft.MachineLearningServices/workspaces/{{{{workspaceName}}}}/Environments/AzureML-Minimal\\"" '
+        'generatedBy="User" properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"'
+        'string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+        '--resource-group "{rg_3}" '
+        '--version "1" '
+        '--workspace-name "{myWorkspace}"',
+        checks=checks)
+
+
+# EXAMPLE: /ComponentVersions/get/Get Component Version.
+@try_manual
+def step_component_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices component-version show '
+ '--name "{myMachinelearningservice3}" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /ComponentVersions/get/List Component Version.
+@try_manual
+def step_component_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices component-version list '
+ '--name "{myMachinelearningservice3}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /ComponentVersions/delete/Delete Component Version.
+@try_manual
+def step_component_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices component-version delete -y '
+ '--name "{myMachinelearningservice3}" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataContainers/put/CreateOrUpdate Data Container.
+@try_manual
+def step_data_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update a data container (note: uses workspace {myWorkspace7})."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-container create '
+             '--name "{myMachinelearningservice4}" '
+             '--properties description="string" properties={{"properties1":"value1","properties2":"value2"}} '
+             'tags={{"tag1":"value1","tag2":"value2"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataContainers/get/Get Data Container.
+@try_manual
+def step_data_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the data container created above."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-container show '
+             '--name "{myMachinelearningservice4}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataContainers/get/List Data Container.
+@try_manual
+def step_data_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List data containers in the workspace."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-container list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataContainers/delete/Delete Data Container.
+@try_manual
+def step_data_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete the data container (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-container delete -y '
+             '--name "{myMachinelearningservice4}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/Create or update datastore.
+@try_manual
+def step_datastore_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update a datastore with every contents/credentials variant populated.
+
+    The --properties payload is the swagger example verbatim (AzureBlob contents
+    type with account-key/certificate/SAS/service-principal/SQL-admin credential
+    blocks for each backing store) — placeholder "string" values throughout.
+    """
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "{myMachinelearningservice5}" '
+             '--properties description="string" contents={{"azureDataLake":{{"credentials":{{"accountKey":{{"key":"stri'
+             'ng"}},"certificate":{{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-'
+             '2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"stri'
+             'ng"}},"datastoreCredentialsType":"AccountKey","sas":{{"sasToken":"string"}},"servicePrincipal":{{"authori'
+             'tyUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"'
+             'string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"}},"sqlAdmin":{{"password":"string","userId":"st'
+             'ring"}}}},"storeName":"string"}},"azureMySql":{{"credentials":{{"accountKey":{{"key":"string"}},"certific'
+             'ate":{{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","'
+             'resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"}},"datastor'
+             'eCredentialsType":"AccountKey","sas":{{"sasToken":"string"}},"servicePrincipal":{{"authorityUrl":"string"'
+             ',"clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenant'
+             'Id":"3fa85f64-5717-4562-b3fc-2c963f66afa6"}},"sqlAdmin":{{"password":"string","userId":"string"}}}},"data'
+             'baseName":"string","endpoint":"database.windows.net","portNumber":0,"serverName":"string"}},"azurePostgre'
+             'Sql":{{"credentials":{{"accountKey":{{"key":"string"}},"certificate":{{"authorityUrl":"string","certifica'
+             'te":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f6'
+             '4-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"}},"datastoreCredentialsType":"AccountKey","sas":{{"s'
+             'asToken":"string"}},"servicePrincipal":{{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c96'
+             '3f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6'
+             '"}},"sqlAdmin":{{"password":"string","userId":"string"}}}},"databaseName":"string","enableSSL":true,"endp'
+             'oint":"database.windows.net","portNumber":0,"serverName":"string"}},"azureSqlDatabase":{{"credentials":{{'
+             '"accountKey":{{"key":"string"}},"certificate":{{"authorityUrl":"string","certificate":"string","clientId"'
+             ':"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f'
+             '66afa6","thumbprint":"string"}},"datastoreCredentialsType":"AccountKey","sas":{{"sasToken":"string"}},"se'
+             'rvicePrincipal":{{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret'
+             '":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"}},"sqlAdmin":{{"passw'
+             'ord":"string","userId":"string"}}}},"databaseName":"string","endpoint":"database.windows.net","portNumber'
+             '":0,"serverName":"string"}},"azureStorage":{{"accountName":"string","blobCacheTimeout":0,"containerName":'
+             '"string","credentials":{{"accountKey":{{"key":"string"}},"certificate":{{"authorityUrl":"string","certifi'
+             'cate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85'
+             'f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"}},"datastoreCredentialsType":"AccountKey","sas":{{'
+             '"sasToken":"string"}},"servicePrincipal":{{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c'
+             '963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66af'
+             'a6"}},"sqlAdmin":{{"password":"string","userId":"string"}}}},"endpoint":"core.windows.net","protocol":"ht'
+             'tps"}},"datastoreContentsType":"AzureBlob","glusterFs":{{"serverAddress":"string","volumeName":"string"}}'
+             '}} isDefault=true linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/get/Get datastore.
+@try_manual
+def step_datastore_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the datastore created above."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore show '
+             '--name "{myMachinelearningservice5}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/get/List datastores.
+@try_manual
+def step_datastore_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List datastores in the workspace."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/post/Get datastore secrets.
+@try_manual
+def step_datastore_list_secret(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Retrieve the datastore's stored credentials/secrets."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore list-secret '
+             '--name "{myMachinelearningservice5}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/delete/Delete datastore.
+@try_manual
+def step_datastore_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete the datastore (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore delete -y '
+             '--name "{myMachinelearningservice5}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataVersions/put/CreateOrUpdate Data Version.
+@try_manual
+def step_data_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update data version 456 (Simple dataset; workspace {myWorkspace7})."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-version create '
+             '--name "{myMachinelearningservice6}" '
+             '--properties description="string" assetPath={{"path":"string","isDirectory":false}} datasetType="Simple" '
+             'datastoreId="string" properties={{"properties1":"value1","properties2":"value2"}} '
+             'tags={{"tag1":"value1","tag2":"value2"}} '
+             '--resource-group "{rg_3}" '
+             '--version "456" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataVersions/get/Get Data Version.
+@try_manual
+def step_data_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show data version 456."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-version show '
+             '--name "{myMachinelearningservice6}" '
+             '--resource-group "{rg_3}" '
+             '--version "456" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataVersions/get/List Data Version.
+@try_manual
+def step_data_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List versions of the named data asset."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-version list '
+             '--name "{myMachinelearningservice6}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataVersions/delete/Delete Data Version.
+@try_manual
+def step_data_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete data version 456 (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices data-version delete -y '
+             '--name "{myMachinelearningservice6}" '
+             '--resource-group "{rg_3}" '
+             '--version "456" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/put/CreateOrUpdate Environment Container.
+@try_manual
+def step_environment_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update an environment container with placeholder properties/tags."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-container create '
+             '--name "{myMachinelearningservice3}" '
+             '--properties description="string" properties={{"additionalProp1":"string","additionalProp2":"string","add'
+             'itionalProp3":"string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"'
+             'string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/get/Get Environment Container.
+@try_manual
+def step_environment_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the environment container."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-container show '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/get/List Environment Container.
+@try_manual
+def step_environment_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List environment containers (exercises the --skiptoken paging parameter)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-container list '
+             '--skiptoken "skiptoken" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/delete/Delete Environment Container.
+@try_manual
+def step_environment_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete the environment container (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-container delete -y '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/put/CreateOrUpdate Environment Specification Version.
+@try_manual
+def step_environment_specification_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update environment specification version 1 (Docker build spec)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-specification-version create '
+             '--name "{myMachinelearningservice3}" '
+             '--properties description="string" condaFile="string" docker={{"dockerSpecificationType":"Build","dockerfi'
+             'le":"string"}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"stri'
+             'ng"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--version "1" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/get/Get Environment Specification Version.
+@try_manual
+def step_environment_specification_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show environment specification version 1."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-specification-version show '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--version "1" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/get/List Environment Specification Version.
+@try_manual
+def step_environment_specification_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List specification versions (exercises --skiptoken paging)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-specification-version list '
+             '--name "{myMachinelearningservice3}" '
+             '--skiptoken "skiptoken" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/delete/Delete Environment Specification Version.
+@try_manual
+def step_environment_specification_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete specification version 1 (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-specification-version delete -y '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--version "1" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/put/CreateOrUpdate Command Job.
+@try_manual
+def step_job_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update a command job; --properties is escaped inline JSON."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices job create '
+             '--properties "{{\\"description\\":\\"string\\",\\"properties\\":{{\\"additionalProp1\\":\\"string\\",\\"a'
+             'dditionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}},\\"tags\\":{{\\"additionalProp1\\":\\'
+             '"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}}}" '
+             '--id "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/put/CreateOrUpdate Sweep Job.
+@try_manual
+def step_job_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Sweep-job variant of the swagger example; identical CLI call, so delegate."""
+    return step_job_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /Jobs/get/Get Command Job.
+@try_manual
+def step_job_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the command job by id."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices job show '
+             '--id "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/get/Get Sweep Job.
+@try_manual
+def step_job_show2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Sweep-job variant; identical CLI call, so delegate."""
+    return step_job_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /Jobs/get/List Command Job.
+@try_manual
+def step_job_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List jobs in the workspace (exercises --skiptoken paging)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices job list '
+             '--skiptoken "skiptoken" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/get/List Sweep Job.
+@try_manual
+def step_job_list2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Sweep-job variant; identical CLI call, so delegate."""
+    return step_job_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /Jobs/post/Cancel Command Job.
+@try_manual
+def step_job_cancel(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Cancel the running job."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices job cancel '
+             '--id "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/post/Cancel Sweep Job.
+@try_manual
+def step_job_cancel2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Sweep-job variant; identical CLI call, so delegate."""
+    return step_job_cancel(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /Jobs/delete/Delete Command Job.
+@try_manual
+def step_job_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete the job (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices job delete -y '
+             '--id "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Jobs/delete/Delete Sweep Job.
+@try_manual
+def step_job_delete2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Sweep-job variant; identical CLI call, so delegate."""
+    return step_job_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /LabelingJobs/put/CreateOrUpdate Labeling Job.
+@try_manual
+def step_labeling_job_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create/update labeling job "testLabelingJob" with full label-category payload.
+
+    Note: unlike most steps in this file this one targets resource group {rg},
+    not {rg_3}.
+    """
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job create '
+             '--properties description="string" datasetConfiguration={{"assetName":"string","datasetVersion":"string","'
+             'incrementalDatasetRefreshEnabled":true}} jobInstructions={{"uri":"string"}} jobType="Labeling" '
+             'labelCategories={{"additionalProp1":{{"allowMultiSelect":true,"classes":{{"additionalProp1":{{"displayNam'
+             'e":"string","subclasses":{{}}}},"additionalProp2":{{"displayName":"string","subclasses":{{}}}},"additiona'
+             'lProp3":{{"displayName":"string","subclasses":{{}}}}}},"displayName":"string"}},"additionalProp2":{{"allo'
+             'wMultiSelect":true,"classes":{{"additionalProp1":{{"displayName":"string","subclasses":{{}}}},"additional'
+             'Prop2":{{"displayName":"string","subclasses":{{}}}},"additionalProp3":{{"displayName":"string","subclasse'
+             's":{{}}}}}},"displayName":"string"}},"additionalProp3":{{"allowMultiSelect":true,"classes":{{"additionalP'
+             'rop1":{{"displayName":"string","subclasses":{{}}}},"additionalProp2":{{"displayName":"string","subclasses'
+             '":{{}}}},"additionalProp3":{{"displayName":"string","subclasses":{{}}}}}},"displayName":"string"}}}} '
+             'labelingJobMediaProperties={{"mediaType":"Image"}} mlAssistConfiguration={{"inferencingComputeBinding":{{'
+             '"computeId":"string","nodeCount":0}},"mlAssistEnabled":true,"trainingComputeBinding":{{"computeId":"strin'
+             'g","nodeCount":0}}}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3"'
+             ':"string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--id "testLabelingJob" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/get/Get Labeling Job.
+@try_manual
+def step_labeling_job_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the labeling job, requesting instructions and label categories too."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job show '
+             '--id "testLabelingJob" '
+             '--include-job-instructions true '
+             '--include-label-categories true '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/get/List Labeling Job.
+@try_manual
+def step_labeling_job_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List labeling jobs with paging parameters (--skiptoken, --count)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job list '
+             '--skiptoken "skiptoken" '
+             '--count "10" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/ExportLabels Labeling Job.
+@try_manual
+def step_labeling_job_export_label(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Trigger label export for the labeling job."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job export-label '
+             '--id "testLabelingJob" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/Pause Labeling Job.
+@try_manual
+def step_labeling_job_pause(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Pause the labeling job."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job pause '
+             '--id "testLabelingJob" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/Resume Labeling Job.
+@try_manual
+def step_labeling_job_resume(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Resume the paused labeling job."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job resume '
+             '--id "testLabelingJob" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/delete/Delete Labeling Job.
+@try_manual
+def step_labeling_job_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete the labeling job (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices labeling-job delete -y '
+             '--id "testLabelingJob" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /LinkedServices/put/CreateLinkedService
+@try_manual
+def step_linked_service_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Link a Synapse workspace to the ML workspace (rg_7 / {myWorkspace5})."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices linked-service create '
+             '--link-name "{myMachinelearningservice}" '
+             '--name "{myMachinelearningservice}" '
+             '--type "SystemAssigned" '
+             '--location "westus" '
+             '--properties linked-service-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg_7}/providers'
+             '/Microsoft.Synapse/workspaces/{myWorkspace6}" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /LinkedServices/get/GetLinkedService
+@try_manual
+def step_linked_service_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show the linked service by link name."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices linked-service show '
+             '--link-name "{myMachinelearningservice}" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /LinkedServices/get/ListLinkedServices
+@try_manual
+def step_linked_service_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List linked services on the workspace."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices linked-service list '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /LinkedServices/delete/DeleteLinkedService
+@try_manual
+def step_linked_service_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Unlink the service (-y skips the confirmation prompt)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices linked-service delete -y '
+             '--link-name "{myMachinelearningservice}" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a AML Compute
+@try_manual
+def step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create compute "compute123" with full AML-compute properties JSON.
+
+    NOTE(review): generated name says "aks" but the EXAMPLE/payload is an AML
+    compute — the generator routes all compute types through the aks command.
+    """
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--ak-s-properties "{{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\"'
+             ',\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{{\\"maxNodeCount\\":1,\\"minNo'
+             'deCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}},\\"virtualMachineImage\\":{{\\"id\\":\\"/subs'
+             'criptions/{subscription_id}/resourceGroups/{rg_4}/providers/Microsoft.Compute/galleries/myImageGallery/im'
+             'ages/myImageDefinition/versions/0.0.1\\"}},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6'
+             '\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a DataFactory Compute
+@try_manual
+def step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create compute "compute123" with no properties payload (DataFactory example)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create AKS Compute
+@try_manual
+def step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """AKS-compute example; identical CLI call to create2, so delegate."""
+    return step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create an ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create compute "compute123" with ComputeInstance properties (personal auth)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--ak-s-properties "{{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\"'
+             ':\\"personal\\",\\"personalComputeInstanceSettings\\":{{\\"assignedUser\\":{{\\"objectId\\":\\"00000000-0'
+             '000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}}}},\\"sshSetting'
+             's\\":{{\\"sshPublicAccess\\":\\"Disabled\\"}},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"'
+             'STANDARD_NC6\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create an ComputeInstance Compute with minimal inputs
+@try_manual
+def step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Create compute "compute123" with the minimal payload (vmSize only)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--ak-s-properties "{{\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AKS Compute
+@try_manual
+def step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Show compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute show '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AML Compute
+@try_manual
+def step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """AML-compute example; identical CLI call, so delegate."""
+    return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get an ComputeInstance
+@try_manual
+def step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """ComputeInstance example; identical CLI call, so delegate."""
+    return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get Computes
+@try_manual
+def step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List computes in the workspace."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/patch/Update a AmlCompute Compute
+@try_manual
+def step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Patch compute "compute123" scale settings (4 nodes, PT5M idle scale-down)."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute update '
+             '--compute-name "compute123" '
+             '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Get compute nodes information for a compute
+@try_manual
+def step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List node details for compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute list-node '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/List AKS Compute Keys
+@try_manual
+def step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """List secrets/keys for compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute list-key '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Restart ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Restart compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute restart '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Start ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Start compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute start '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Stop ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Stop compute "compute123"."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute stop '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/delete/Delete Compute
+@try_manual
+def step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    """Delete compute "compute123", deleting the underlying resource as well."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-compute delete -y '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--underlying-resource-action "Delete" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/put/Create Or Update service
+@try_manual
+def step_machine_learning_service_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-service create '  # JSON body below uses '{{'/'}}' to escape literal braces from kwargs formatting
+             '--properties "{{\\"appInsightsEnabled\\":true,\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"conta'
+             'inerResourceRequirements\\":{{\\"cpu\\":1,\\"memoryInGB\\":1}},\\"environmentImageRequest\\":{{\\"assets'
+             '\\":[{{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\\"aml://stor'
+             'age/azureml/score.py\\"}}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{{\\"name\\":\\"AzureML-S'
+             'cikit-learn-0.20.3\\",\\"docker\\":{{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azure'
+             'ml/base:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{{\\"address\\":null,\\"password\\":null,\\"u'
+             'sername\\":null}}}},\\"environmentVariables\\":{{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"}},\\"inferenci'
+             'ngStackVersion\\":null,\\"python\\":{{\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{{\\"name\\'
+             '":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\"conda-forge\\"],\\"dependencies\\":['
+             '\\"python=3.6.2\\",{{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0.69\\",\\"azureml-tele'
+             'metry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\\",\\"s'
+             'cikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}}]}},\\"interpreterPa'
+             'th\\":\\"python\\",\\"userManagedDependencies\\":false}},\\"spark\\":{{\\"packages\\":[],\\"precachePacka'
+             'ges\\":true,\\"repositories\\":[]}},\\"version\\":\\"3\\"}},\\"models\\":[{{\\"name\\":\\"sklearn_regress'
+             'ion_model.pkl\\",\\"mimeType\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_reg'
+             'ression_model.pkl\\"}}]}},\\"location\\":\\"eastus2\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--service-name "service456" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/get/Get Service
+@try_manual
+def step_machine_learning_service_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-service show '
+             '--resource-group "{rg_3}" '
+             '--service-name "service123" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/get/Get Services
+@try_manual
+def step_machine_learning_service_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-service list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/delete/Delete Service
+@try_manual
+def step_machine_learning_service_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices machine-learning-service delete -y '
+             '--resource-group "{rg_3}" '
+             '--service-name "service123" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /machinelearningservices/get/List Skus
+@try_manual
+def step_list_sku(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices list-sku',  # subscription-level: no resource group or workspace args
+             checks=checks)
+
+
+# EXAMPLE: /ModelContainers/put/CreateOrUpdate Model Container.
+@try_manual
+def step_model_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices model-container create '
+             '--name "{myMachinelearningservice3}" '
+             '--properties description="Model container description" tags={{"tag1":"value1","tag2":"value2"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelContainers/get/Get Model Container.
+@try_manual
+def step_model_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-container show '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelContainers/get/List Model Container.
+@try_manual
+def step_model_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-container list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelContainers/delete/Delete Model Container.
+@try_manual
+def step_model_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-container delete -y '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelVersions/put/CreateOrUpdate Model Version.
+@try_manual
+def step_model_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-version create '
+             '--name "{myMachinelearningservice3}" '
+             '--properties description="Model version description" assetPath={{"path":"LocalUpload/12345/some/path","is'
+             'Directory":true}} datastoreId="/subscriptions/{subscription_id}/resourceGroups/{rg_3}/providers/Microsoft'
+             '.MachineLearningServices/workspaces/{myWorkspace7}/datastores/{myDatastore}" '
+             'properties={{"prop1":"value1","prop2":"value2"}} stage="Production" tags={{"tag1":"value1","tag2":"value2'
+             '"}} '
+             '--resource-group "{rg_3}" '
+             '--version "999" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelVersions/get/Get Model Version.
+@try_manual
+def step_model_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-version show '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--version "999" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelVersions/get/List Model Version.
+@try_manual
+def step_model_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-version list '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--version "999" '  # NOTE(review): --version on a list operation mirrors the generated example; verify intent
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /ModelVersions/delete/Delete Model Version.
+@try_manual
+def step_model_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices model-version delete -y '
+             '--name "{myMachinelearningservice3}" '
+             '--resource-group "{rg_3}" '
+             '--version "999" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /Notebooks/post/List Workspace Keys
+@try_manual
+def step_notebook_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices notebook list-key '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Notebooks/post/Prepare Notebook
+@try_manual
+def step_notebook_prepare(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices notebook prepare '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/put/CreateOrUpdate Online Deployment.
+@try_manual
+def step_online_deployment_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices online-deployment create '
+             '--user-assigned-identities "{{\\"additionalProp1\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"st'
+             'ring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}},\\"additionalProp2\\":{{\\"clientId\\'
+             '":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}'
+             ',\\"additionalProp3\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"s'
+             'tring\\",\\"tenantId\\":\\"string\\"}}}}" '
+             '--kind "string" '
+             '--location "string" '
+             '--properties description="string" codeConfiguration={{"codeArtifactId":"string","command":"string"}} '
+             'deploymentConfiguration={{"appInsightsEnabled":true,"computeType":"Managed","maxConcurrentRequestsPerInst'
+             'ance":0,"maxQueueWaitMs":0,"scoringTimeoutMs":0}} environmentId="string" modelReference={{"assetId":"stri'
+             'ng","referenceType":"Id"}} properties={{"additionalProp1":"string","additionalProp2":"string","additional'
+             'Prop3":"string"}} scaleSettings={{"instanceCount":0,"maximum":0,"minimum":0,"scaleType":"Automatic"}} '
+             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+             '--deployment-name "testDeployment" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/get/Get Online Deployment.
+@try_manual
+def step_online_deployment_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-deployment show '
+             '--deployment-name "testDeployment" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/get/List Online Deployment.
+@try_manual
+def step_online_deployment_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-deployment list '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/patch/Update Online Deployment.
+@try_manual
+def step_online_deployment_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-deployment update '
+             '--user-assigned-identities "{{\\"additionalProp1\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"st'
+             'ring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}},\\"additionalProp2\\":{{\\"clientId\\'
+             '":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}'
+             ',\\"additionalProp3\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"s'
+             'tring\\",\\"tenantId\\":\\"string\\"}}}}" '
+             '--kind "string" '
+             '--deployment-configuration "{{\\"appInsightsEnabled\\":true,\\"computeType\\":\\"Managed\\",\\"maxConcurr'
+             'entRequestsPerInstance\\":0,\\"maxQueueWaitMs\\":0,\\"scoringTimeoutMs\\":0}}" '
+             '--scale-settings instance-count=0 maximum=0 minimum=0 scale-type="Automatic" '
+             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+             '--deployment-name "testDeployment" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/post/GetLogs Online Deployment.
+@try_manual
+def step_online_deployment_get_log(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-deployment get-log '
+             '--container-type "StorageInitializer" '
+             '--tail 0 '
+             '--deployment-name "testDeployment" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/delete/Delete Online Deployment.
+@try_manual
+def step_online_deployment_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-deployment delete -y '
+             '--deployment-name "testDeployment" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/put/CreateOrUpdate Online Endpoint.
+@try_manual
+def step_online_endpoint_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint create '
+             '--user-assigned-identities "{{\\"additionalProp1\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"st'
+             'ring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}},\\"additionalProp2\\":{{\\"clientId\\'
+             '":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}'
+             ',\\"additionalProp3\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"s'
+             'tring\\",\\"tenantId\\":\\"string\\"}}}}" '
+             '--kind "string" '
+             '--location "string" '
+             '--properties description="string" authMode="AMLToken" computeConfiguration={{"computeType":"Managed"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'trafficRules={{"additionalProp1":0,"additionalProp2":0,"additionalProp3":0}} '
+             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/get/Get Online Endpoint.
+@try_manual
+def step_online_endpoint_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint show '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/get/List Online Endpoint.
+@try_manual
+def step_online_endpoint_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/patch/Update Online Endpoint.
+@try_manual
+def step_online_endpoint_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint update '
+             '--user-assigned-identities "{{\\"additionalProp1\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"st'
+             'ring\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}},\\"additionalProp2\\":{{\\"clientId\\'
+             '":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"}}'
+             ',\\"additionalProp3\\":{{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"s'
+             'tring\\",\\"tenantId\\":\\"string\\"}}}}" '
+             '--kind "string" '
+             '--traffic-rules additionalProp1=0 additionalProp2=0 additionalProp3=0 '
+             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/GetToken Online Endpoint.
+@try_manual
+def step_online_endpoint_get_token(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint get-token '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/ListKeys Online Endpoint.
+@try_manual
+def step_online_endpoint_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint list-key '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/RegenerateKeys Online Endpoint.
+@try_manual
+def step_online_endpoint_regenerate_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint regenerate-key '
+             '--key-type "Primary" '
+             '--key-value "string" '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/delete/Delete Online Endpoint.
+@try_manual
+def step_online_endpoint_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices online-endpoint delete -y '
+             '--endpoint-name "testEndpoint" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace7}"',
+             checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices private-endpoint-connection put '
+             '--name "{myPrivateEndpointConnection}" '
+             '--private-link-service-connection-state description="Auto-Approved" status="Approved" '
+             '--resource-group "{rg_6}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices private-endpoint-connection show '
+             '--name "{myPrivateEndpointConnection}" '
+             '--resource-group "{rg_6}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices private-endpoint-connection delete -y '
+             '--name "{myPrivateEndpointConnection}" '
+             '--resource-group "{rg_6}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources
+@try_manual
+def step_private_link_resource_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices private-link-resource list '
+             '--resource-group "{rg_6}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily
+@try_manual
+def step_quota_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices quota list '
+             '--location "eastus"',
+             checks=checks)
+
+
+# EXAMPLE: /Quotas/post/update quotas
+@try_manual
+def step_quota_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices quota update '
+             '--location "eastus" '
+             '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+             'esourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace3}/quotas/{myQuot'
+             'a}" limit=100 unit="Count" '
+             '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+             'esourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace4}/quotas/{myQuot'
+             'a}" limit=200 unit="Count"',  # --value is given twice: one quota entry per workspace
+             checks=checks)
+
+
+# EXAMPLE: /Usages/get/List Usages
+@try_manual
+def step_usage_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices usage list '
+             '--location "eastus"',
+             checks=checks)
+
+
+# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes
+@try_manual
+def step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices virtual-machine-size list '
+             '--location "eastus"',
+             checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection
+@try_manual
+def step_workspace_connection_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:  # default to no response validations
+        checks = []
+    test.cmd('az machinelearningservices workspace-connection create '
+             '--connection-name "{myMachinelearningservice2}" '
+             '--name "{myMachinelearningservice2}" '  # NOTE(review): same value as --connection-name; presumably aliases from codegen — confirm
+             '--auth-type "PAT" '
+             '--category "ACR" '
+             '--target "www.facebook.com" '
+             '--value "secrets" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection
+@try_manual
+def step_workspace_connection_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices workspace-connection show '
+             '--connection-name "{myMachinelearningservice2}" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections
+@try_manual
+def step_workspace_connection_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices workspace-connection list '
+             '--category "ACR" '
+             '--resource-group "{rg_7}" '
+             '--target "www.facebook.com" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection
+@try_manual
+def step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices workspace-connection delete -y '
+             '--connection-name "{myMachinelearningservice2}" '
+             '--resource-group "{rg_7}" '
+             '--workspace-name "{myWorkspace5}"',
+             checks=checks)
+
+
+# EXAMPLE: /WorkspaceFeatures/get/List Workspace features
+@try_manual
+def step_workspace_feature_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices workspace-feature list '
+             '--resource-group "{rg_4}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Workspaces/delete/Delete Workspace
+@try_manual
+def step_workspace_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices workspace delete -y '
+             '--resource-group "{rg}" '
+             '--name "{myWorkspace}"',
+             checks=checks)
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
new file mode 100644
index 00000000000..687299ce2e5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
@@ -0,0 +1,443 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from azure.cli.testsdk import StorageAccountPreparer
+from .example_steps import step_workspace_create
+from .example_steps import step_workspace_show
+from .example_steps import step_workspace_list
+from .example_steps import step_workspace_list2
+from .example_steps import step_workspace_update
+from .example_steps import step_workspace_list_key
+from .example_steps import step_workspace_resync_key
+from .example_steps import step_code_container_create
+from .example_steps import step_code_container_show
+from .example_steps import step_code_container_list
+from .example_steps import step_code_container_delete
+from .example_steps import step_code_version_create
+from .example_steps import step_code_version_show
+from .example_steps import step_code_version_list
+from .example_steps import step_code_version_delete
+from .example_steps import step_component_container_create
+from .example_steps import step_component_container_show
+from .example_steps import step_component_container_list
+from .example_steps import step_component_container_delete
+from .example_steps import step_component_version_create
+from .example_steps import step_component_version_show
+from .example_steps import step_component_version_list
+from .example_steps import step_component_version_delete
+from .example_steps import step_data_container_create
+from .example_steps import step_data_container_show
+from .example_steps import step_data_container_list
+from .example_steps import step_data_container_delete
+from .example_steps import step_datastore_create
+from .example_steps import step_datastore_show
+from .example_steps import step_datastore_list
+from .example_steps import step_datastore_list_secret
+from .example_steps import step_datastore_delete
+from .example_steps import step_data_version_create
+from .example_steps import step_data_version_show
+from .example_steps import step_data_version_list
+from .example_steps import step_data_version_delete
+from .example_steps import step_environment_container_create
+from .example_steps import step_environment_container_show
+from .example_steps import step_environment_container_list
+from .example_steps import step_environment_container_delete
+from .example_steps import step_environment_specification_version_create
+from .example_steps import step_environment_specification_version_show
+from .example_steps import step_environment_specification_version_list
+from .example_steps import step_environment_specification_version_delete
+from .example_steps import step_job_create
+from .example_steps import step_job_create2
+from .example_steps import step_job_show
+from .example_steps import step_job_show2
+from .example_steps import step_job_list
+from .example_steps import step_job_list2
+from .example_steps import step_job_cancel
+from .example_steps import step_job_cancel2
+from .example_steps import step_job_delete
+from .example_steps import step_job_delete2
+from .example_steps import step_labeling_job_create
+from .example_steps import step_labeling_job_show
+from .example_steps import step_labeling_job_list
+from .example_steps import step_labeling_job_export_label
+from .example_steps import step_labeling_job_pause
+from .example_steps import step_labeling_job_resume
+from .example_steps import step_labeling_job_delete
+from .example_steps import step_linked_service_create
+from .example_steps import step_linked_service_show
+from .example_steps import step_linked_service_list
+from .example_steps import step_linked_service_delete
+from .example_steps import step_machine_learning_compute_aks_create
+from .example_steps import step_machine_learning_compute_aks_create2
+from .example_steps import step_machine_learning_compute_aks_create3
+from .example_steps import step_machine_learning_compute_aks_create4
+from .example_steps import step_machine_learning_compute_aks_create5
+from .example_steps import step_machine_learning_compute_show
+from .example_steps import step_machine_learning_compute_show2
+from .example_steps import step_machine_learning_compute_show3
+from .example_steps import step_machine_learning_compute_list
+from .example_steps import step_machine_learning_compute_update
+from .example_steps import step_machine_learning_compute_list_node
+from .example_steps import step_machine_learning_compute_list_key
+from .example_steps import step_machine_learning_compute_restart
+from .example_steps import step_machine_learning_compute_start
+from .example_steps import step_machine_learning_compute_stop
+from .example_steps import step_machine_learning_compute_delete
+from .example_steps import step_machine_learning_service_create
+from .example_steps import step_machine_learning_service_show
+from .example_steps import step_machine_learning_service_list
+from .example_steps import step_machine_learning_service_delete
+from .example_steps import step_list_sku
+from .example_steps import step_model_container_create
+from .example_steps import step_model_container_show
+from .example_steps import step_model_container_list
+from .example_steps import step_model_container_delete
+from .example_steps import step_model_version_create
+from .example_steps import step_model_version_show
+from .example_steps import step_model_version_list
+from .example_steps import step_model_version_delete
+from .example_steps import step_notebook_list_key
+from .example_steps import step_notebook_prepare
+from .example_steps import step_online_deployment_create
+from .example_steps import step_online_deployment_show
+from .example_steps import step_online_deployment_list
+from .example_steps import step_online_deployment_update
+from .example_steps import step_online_deployment_get_log
+from .example_steps import step_online_deployment_delete
+from .example_steps import step_online_endpoint_create
+from .example_steps import step_online_endpoint_show
+from .example_steps import step_online_endpoint_list
+from .example_steps import step_online_endpoint_update
+from .example_steps import step_online_endpoint_get_token
+from .example_steps import step_online_endpoint_list_key
+from .example_steps import step_online_endpoint_regenerate_key
+from .example_steps import step_online_endpoint_delete
+from .example_steps import step_private_endpoint_connection_put
+from .example_steps import step_private_endpoint_connection_show
+from .example_steps import step_private_endpoint_connection_delete
+from .example_steps import step_private_link_resource_list
+from .example_steps import step_quota_list
+from .example_steps import step_quota_update
+from .example_steps import step_usage_list
+from .example_steps import step_virtual_machine_size_list
+from .example_steps import step_workspace_connection_create
+from .example_steps import step_workspace_connection_show
+from .example_steps import step_workspace_connection_list
+from .example_steps import step_workspace_connection_delete
+from .example_steps import step_workspace_feature_list
+from .example_steps import step_workspace_delete
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
+
+
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))  # absolute path of the directory containing this test module
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6):
+    pass  # no extra environment preparation needed; @try_manual allows a manual override
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6):
+    pass  # no extra cleanup needed; resource groups are handled by the test preparers
+
+
+# Testcase: Scenario
+@try_manual
+def call_scenario(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6):  # Run every generated example step in order: workspace CRUD, ML assets, computes, endpoints, teardown
+    setup_scenario(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6)
+    step_workspace_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[  # JMESPath checks against the create response
+        test.check("identity.type", "SystemAssigned", case_sensitive=False),
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "test description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+                   "ee112233445566778899aabb", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+                   "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+        test.check("encryption.status", "Enabled", case_sensitive=False),
+        test.check("friendlyName", "HelloName", case_sensitive=False),
+        test.check("hbiWorkspace", False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("sku.name", "Basic", case_sensitive=False),
+        test.check("sku.tier", "Basic", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    step_workspace_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[  # same assertions minus identity/sku, re-read via GET
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "test description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+                   "ee112233445566778899aabb", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+                   "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+        test.check("encryption.status", "Enabled", case_sensitive=False),
+        test.check("friendlyName", "HelloName", case_sensitive=False),
+        test.check("hbiWorkspace", False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    step_workspace_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[  # list scoped to one RG -> exactly 1 workspace
+        test.check('length(@)', 1),
+    ])
+    step_workspace_list2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[  # NOTE(review): expects 2 workspaces though only 1 was created here — presumably a subscription-wide list; confirm
+        test.check('length(@)', 2),
+    ])
+    step_workspace_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[  # verifies mutated fields (description, friendlyName, sku) after PATCH
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "new description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("friendlyName", "New friendly name", case_sensitive=False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("sku.name", "Enterprise", case_sensitive=False),
+        test.check("sku.tier", "Enterprise", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    step_workspace_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])  # checks=[] from here on: steps only assert HTTP success
+    step_workspace_resync_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_code_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_component_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_datastore_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_datastore_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_datastore_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_datastore_list_secret(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_datastore_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_data_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_specification_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_specification_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_specification_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_environment_specification_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_show2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_list2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_cancel(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_cancel2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_job_delete2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_export_label(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_pause(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_resume(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_labeling_job_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_linked_service_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_linked_service_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_linked_service_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_linked_service_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])  # NOTE(review): each aks_create* variant below is invoked 8 identical times — looks like a codegen artifact; confirm before trimming
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_service_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_service_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_service_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_machine_learning_service_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_list_sku(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_container_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_container_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_container_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_container_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_version_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_version_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_version_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_model_version_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_notebook_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_notebook_prepare(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_get_log(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_deployment_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_get_token(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_list_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_regenerate_key(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_online_endpoint_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[
+        test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+    ])
+    step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_private_link_resource_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_quota_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_quota_update(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_usage_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_connection_create(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_connection_show(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_connection_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_feature_list(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    step_workspace_delete(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6, checks=[])
+    cleanup_scenario(test, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6)
+
+
+# Test class for Scenario
+@try_manual
+class MachinelearningservicesScenarioTest(ScenarioTest):  # Generated ScenarioTest harness; fixed resource names are injected via self.kwargs
+
+    def __init__(self, *args, **kwargs):
+        super(MachinelearningservicesScenarioTest, self).__init__(*args, **kwargs)
+        self.kwargs.update({
+            'subscription_id': self.get_subscription_id()  # substituted into {subscription_id} placeholders in commands/checks
+        })
+
+        self.kwargs.update({  # fixed names referenced by the example steps via {placeholder} substitution
+            'myMachinelearningservice': 'link-1',
+            'myMachinelearningservice2': 'connection-1',
+            'myMachinelearningservice3': 'testContainer',
+            'myMachinelearningservice4': 'datacontainer123',
+            'myMachinelearningservice5': 'testDatastore',
+            'myMachinelearningservice6': 'dataset123',
+            'myWorkspace8': 'default',
+            'myPrivateLinkResource2': 'default',
+            'myWorkspace3': 'demo_workspace1',
+            'myWorkspace4': 'demo_workspace2',
+            'myWorkspace6': 'Syn-1',
+            'myWorkspace7': 'workspace123',
+            'myWorkspace': 'testworkspace',
+            'myWorkspace2': 'workspaces123',
+            'myWorkspace5': 'workspace-1',
+            'myQuota': 'Standard_DSv2_Family_Cluster_Dedicated_vCPUs',
+            'myPrivateEndpointConnection': '{privateEndpointConnectionName}',  # NOTE(review): unresolved codegen placeholder — the literal string will be used; confirm
+            'myPrivateLinkResource': 'Sql',
+            'myDatastore': 'datastore123',
+        })
+
+
+    # NOTE(review): every name_prefix is sliced with [:7], so all seven groups share the prefix 'clitest' — presumably intentional (CLI test length limit); verify
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg',
+                           parameter_name='rg')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2',
+                           parameter_name='rg_2')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_5', parameter_name='rg_5')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_4',
+                           parameter_name='rg_4')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_7',
+                           parameter_name='rg_7')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3',
+                           parameter_name='rg_3')
+    @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_6',
+                           parameter_name='rg_6')
+    @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa',
+                            resource_group_parameter_name='rg_2')
+    def test_machinelearningservices_Scenario(self, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6):
+        call_scenario(self, rg, rg_2, rg_5, rg_4, rg_7, rg_3, rg_6)
+        calc_coverage(__file__)  # record which example steps actually ran
+        raise_if()  # re-raise any exception swallowed by @try_manual steps
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # declare a pkgutil-style namespace package so other distributions can extend vendored_sdks
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
new file mode 100644
index 00000000000..dad2c6eeb01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']  # the client class is the package's only public export
+
+try:
+    from ._patch import patch_sdk  # type: ignore  # optional handwritten customization hook
+    patch_sdk()
+except ImportError:
+    pass  # no _patch module shipped — run with the generated code as-is
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..3b40edf5449
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
@@ -0,0 +1,205 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import MachineLearningComputeOperations
+from .operations import AzureMachineLearningWorkspacesOperationsMixin
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import LinkedServicesOperations
+from .operations import MachineLearningServiceOperations
+from .operations import NotebooksOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import CodeContainersOperations
+from .operations import CodeVersionsOperations
+from .operations import ComponentContainersOperations
+from .operations import ComponentVersionsOperations
+from .operations import DataContainersOperations
+from .operations import DatastoresOperations
+from .operations import DataVersionsOperations
+from .operations import EnvironmentContainersOperations
+from .operations import EnvironmentSpecificationVersionsOperations
+from .operations import JobsOperations
+from .operations import LabelingJobsOperations
+from .operations import ModelContainersOperations
+from .operations import ModelVersionsOperations
+from .operations import OnlineDeploymentsOperations
+from .operations import OnlineEndpointsOperations
+from . import models
+
+
+class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin):
+    """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+    :ivar operations: Operations operations
+    :vartype operations: azure_machine_learning_workspaces.operations.Operations
+    :ivar workspaces: WorkspacesOperations operations
+    :vartype workspaces: azure_machine_learning_workspaces.operations.WorkspacesOperations
+    :ivar workspace_features: WorkspaceFeaturesOperations operations
+    :vartype workspace_features: azure_machine_learning_workspaces.operations.WorkspaceFeaturesOperations
+    :ivar usages: UsagesOperations operations
+    :vartype usages: azure_machine_learning_workspaces.operations.UsagesOperations
+    :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+    :vartype virtual_machine_sizes: azure_machine_learning_workspaces.operations.VirtualMachineSizesOperations
+    :ivar quotas: QuotasOperations operations
+    :vartype quotas: azure_machine_learning_workspaces.operations.QuotasOperations
+    :ivar machine_learning_compute: MachineLearningComputeOperations operations
+    :vartype machine_learning_compute: azure_machine_learning_workspaces.operations.MachineLearningComputeOperations
+    :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+    :vartype private_endpoint_connections: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionsOperations
+    :ivar private_link_resources: PrivateLinkResourcesOperations operations
+    :vartype private_link_resources: azure_machine_learning_workspaces.operations.PrivateLinkResourcesOperations
+    :ivar linked_services: LinkedServicesOperations operations
+    :vartype linked_services: azure_machine_learning_workspaces.operations.LinkedServicesOperations
+    :ivar machine_learning_service: MachineLearningServiceOperations operations
+    :vartype machine_learning_service: azure_machine_learning_workspaces.operations.MachineLearningServiceOperations
+    :ivar notebooks: NotebooksOperations operations
+    :vartype notebooks: azure_machine_learning_workspaces.operations.NotebooksOperations
+    :ivar workspace_connections: WorkspaceConnectionsOperations operations
+    :vartype workspace_connections: azure_machine_learning_workspaces.operations.WorkspaceConnectionsOperations
+    :ivar code_containers: CodeContainersOperations operations
+    :vartype code_containers: azure_machine_learning_workspaces.operations.CodeContainersOperations
+    :ivar code_versions: CodeVersionsOperations operations
+    :vartype code_versions: azure_machine_learning_workspaces.operations.CodeVersionsOperations
+    :ivar component_containers: ComponentContainersOperations operations
+    :vartype component_containers: azure_machine_learning_workspaces.operations.ComponentContainersOperations
+    :ivar component_versions: ComponentVersionsOperations operations
+    :vartype component_versions: azure_machine_learning_workspaces.operations.ComponentVersionsOperations
+    :ivar data_containers: DataContainersOperations operations
+    :vartype data_containers: azure_machine_learning_workspaces.operations.DataContainersOperations
+    :ivar datastores: DatastoresOperations operations
+    :vartype datastores: azure_machine_learning_workspaces.operations.DatastoresOperations
+    :ivar data_versions: DataVersionsOperations operations
+    :vartype data_versions: azure_machine_learning_workspaces.operations.DataVersionsOperations
+    :ivar environment_containers: EnvironmentContainersOperations operations
+    :vartype environment_containers: azure_machine_learning_workspaces.operations.EnvironmentContainersOperations
+    :ivar environment_specification_versions: EnvironmentSpecificationVersionsOperations operations
+    :vartype environment_specification_versions: azure_machine_learning_workspaces.operations.EnvironmentSpecificationVersionsOperations
+    :ivar jobs: JobsOperations operations
+    :vartype jobs: azure_machine_learning_workspaces.operations.JobsOperations
+    :ivar labeling_jobs: LabelingJobsOperations operations
+    :vartype labeling_jobs: azure_machine_learning_workspaces.operations.LabelingJobsOperations
+    :ivar model_containers: ModelContainersOperations operations
+    :vartype model_containers: azure_machine_learning_workspaces.operations.ModelContainersOperations
+    :ivar model_versions: ModelVersionsOperations operations
+    :vartype model_versions: azure_machine_learning_workspaces.operations.ModelVersionsOperations
+    :ivar online_deployments: OnlineDeploymentsOperations operations
+    :vartype online_deployments: azure_machine_learning_workspaces.operations.OnlineDeploymentsOperations
+    :ivar online_endpoints: OnlineEndpointsOperations operations
+    :vartype online_endpoints: azure_machine_learning_workspaces.operations.OnlineEndpointsOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param subscription_id: Azure subscription identifier.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        subscription_id,  # type: str
+        base_url=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        if not base_url:
+            base_url = 'https://management.azure.com'  # default to the public Azure ARM endpoint
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        # Collect every model class from the models module so msrest can (de)serialize responses.
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        # One operation-group instance per service area, all sharing the same pipeline/config/serializers.
+        self.operations = Operations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspaces = WorkspacesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_features = WorkspaceFeaturesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.usages = UsagesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.virtual_machine_sizes = VirtualMachineSizesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.quotas = QuotasOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.machine_learning_compute = MachineLearningComputeOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_link_resources = PrivateLinkResourcesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.linked_services = LinkedServicesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.machine_learning_service = MachineLearningServiceOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.notebooks = NotebooksOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_connections = WorkspaceConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_containers = CodeContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_versions = CodeVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.component_containers = ComponentContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.component_versions = ComponentVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_containers = DataContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.datastores = DatastoresOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_versions = DataVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_containers = EnvironmentContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_specification_versions = EnvironmentSpecificationVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.jobs = JobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.labeling_jobs = LabelingJobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_containers = ModelContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_versions = ModelVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_deployments = OnlineDeploymentsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_endpoints = OnlineEndpointsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    def close(self):
+        # type: () -> None
+        self._client.close()  # release the underlying pipeline/transport resources
+
+    def __enter__(self):
+        # type: () -> AzureMachineLearningWorkspaces
+        self._client.__enter__()  # delegate context management to the pipeline client
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        self._client.__exit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
new file mode 100644
index 00000000000..17bae221bec
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
class AzureMachineLearningWorkspacesConfiguration(Configuration):
    """Configuration for AzureMachineLearningWorkspaces.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: Azure subscription identifier.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Fail fast on missing required parameters before building the pipeline.
        for param_name, param_value in (('credential', credential), ('subscription_id', subscription_id)):
            if param_value is None:
                raise ValueError("Parameter '{}' must not be None.".format(param_name))
        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = "2020-09-01-preview"
        # Default to the public ARM scope unless the caller supplies custom scopes.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Each pipeline policy slot can be overridden via kwargs; otherwise the
        # default policy class for that slot is instantiated from kwargs.
        policy_defaults = (
            ('user_agent_policy', policies.UserAgentPolicy),
            ('headers_policy', policies.HeadersPolicy),
            ('proxy_policy', policies.ProxyPolicy),
            ('logging_policy', policies.NetworkTraceLoggingPolicy),
            ('http_logging_policy', ARMHttpLoggingPolicy),
            ('retry_policy', policies.RetryPolicy),
            ('custom_hook_policy', policies.CustomHookPolicy),
            ('redirect_policy', policies.RedirectPolicy),
        )
        for attr_name, policy_cls in policy_defaults:
            setattr(self, attr_name, kwargs.get(attr_name) or policy_cls(**kwargs))
        # Authentication has no kwargs-built default: only synthesize a bearer-token
        # policy when a credential is present and no explicit policy was given.
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
new file mode 100644
index 00000000000..872474577c4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..78e2ad4fc6b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,199 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import MachineLearningComputeOperations
+from .operations import AzureMachineLearningWorkspacesOperationsMixin
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import LinkedServicesOperations
+from .operations import MachineLearningServiceOperations
+from .operations import NotebooksOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import CodeContainersOperations
+from .operations import CodeVersionsOperations
+from .operations import ComponentContainersOperations
+from .operations import ComponentVersionsOperations
+from .operations import DataContainersOperations
+from .operations import DatastoresOperations
+from .operations import DataVersionsOperations
+from .operations import EnvironmentContainersOperations
+from .operations import EnvironmentSpecificationVersionsOperations
+from .operations import JobsOperations
+from .operations import LabelingJobsOperations
+from .operations import ModelContainersOperations
+from .operations import ModelVersionsOperations
+from .operations import OnlineDeploymentsOperations
+from .operations import OnlineEndpointsOperations
+from .. import models
+
+
class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin):
    """These APIs allow end users to operate on Azure Machine Learning Workspace resources.

    Every operation group is exposed as an instance attribute (``operations``,
    ``workspaces``, ``workspace_features``, ``usages``, ``virtual_machine_sizes``,
    ``quotas``, ``machine_learning_compute``, ``private_endpoint_connections``,
    ``private_link_resources``, ``linked_services``, ``machine_learning_service``,
    ``notebooks``, ``workspace_connections``, ``code_containers``, ``code_versions``,
    ``component_containers``, ``component_versions``, ``data_containers``,
    ``datastores``, ``data_versions``, ``environment_containers``,
    ``environment_specification_versions``, ``jobs``, ``labeling_jobs``,
    ``model_containers``, ``model_versions``, ``online_deployments``,
    ``online_endpoints``), each an instance of the correspondingly named class in
    ``azure_machine_learning_workspaces.aio.operations``.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Azure subscription identifier.
    :type subscription_id: str
    :param str base_url: Service URL
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        base_url: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
        # Fall back to the public ARM endpoint when no base URL was supplied.
        self._client = AsyncARMPipelineClient(base_url=base_url or 'https://management.azure.com', config=self._config, **kwargs)

        # Only classes from the models module participate in (de)serialization.
        client_models = {model_name: model_cls for model_name, model_cls in models.__dict__.items() if isinstance(model_cls, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        # Wire every operation group to the shared pipeline machinery.
        shared_args = (self._client, self._config, self._serialize, self._deserialize)
        operation_groups = (
            ('operations', Operations),
            ('workspaces', WorkspacesOperations),
            ('workspace_features', WorkspaceFeaturesOperations),
            ('usages', UsagesOperations),
            ('virtual_machine_sizes', VirtualMachineSizesOperations),
            ('quotas', QuotasOperations),
            ('machine_learning_compute', MachineLearningComputeOperations),
            ('private_endpoint_connections', PrivateEndpointConnectionsOperations),
            ('private_link_resources', PrivateLinkResourcesOperations),
            ('linked_services', LinkedServicesOperations),
            ('machine_learning_service', MachineLearningServiceOperations),
            ('notebooks', NotebooksOperations),
            ('workspace_connections', WorkspaceConnectionsOperations),
            ('code_containers', CodeContainersOperations),
            ('code_versions', CodeVersionsOperations),
            ('component_containers', ComponentContainersOperations),
            ('component_versions', ComponentVersionsOperations),
            ('data_containers', DataContainersOperations),
            ('datastores', DatastoresOperations),
            ('data_versions', DataVersionsOperations),
            ('environment_containers', EnvironmentContainersOperations),
            ('environment_specification_versions', EnvironmentSpecificationVersionsOperations),
            ('jobs', JobsOperations),
            ('labeling_jobs', LabelingJobsOperations),
            ('model_containers', ModelContainersOperations),
            ('model_versions', ModelVersionsOperations),
            ('online_deployments', OnlineDeploymentsOperations),
            ('online_endpoints', OnlineEndpointsOperations),
        )
        for attr_name, operation_cls in operation_groups:
            setattr(self, attr_name, operation_cls(*shared_args))

    async def close(self) -> None:
        """Close the underlying async pipeline client and its transport session."""
        await self._client.close()

    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
        """Enter the pipeline client's async context and return this client."""
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details) -> None:
        """Delegate async context-manager exit to the pipeline client."""
        await self._client.__aexit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
new file mode 100644
index 00000000000..08207f34034
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
class AzureMachineLearningWorkspacesConfiguration(Configuration):
    """Configuration for AzureMachineLearningWorkspaces.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Azure subscription identifier.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        # Validate required parameters up front so misconfiguration fails fast.
        for param_name, param_value in (('credential', credential), ('subscription_id', subscription_id)):
            if param_value is None:
                raise ValueError("Parameter '{}' must not be None.".format(param_name))
        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = "2020-09-01-preview"
        # Default to the public ARM scope unless the caller supplies custom scopes.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Each pipeline policy slot can be overridden via kwargs; otherwise the
        # default (async-capable) policy class for that slot is built from kwargs.
        policy_defaults = (
            ('user_agent_policy', policies.UserAgentPolicy),
            ('headers_policy', policies.HeadersPolicy),
            ('proxy_policy', policies.ProxyPolicy),
            ('logging_policy', policies.NetworkTraceLoggingPolicy),
            ('http_logging_policy', ARMHttpLoggingPolicy),
            ('retry_policy', policies.AsyncRetryPolicy),
            ('custom_hook_policy', policies.CustomHookPolicy),
            ('redirect_policy', policies.AsyncRedirectPolicy),
        )
        for attr_name, policy_cls in policy_defaults:
            setattr(self, attr_name, kwargs.get(attr_name) or policy_cls(**kwargs))
        # Authentication has no kwargs-built default: only synthesize a bearer-token
        # policy when a credential is present and no explicit policy was given.
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
new file mode 100644
index 00000000000..9cd96ead8ac
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._linked_services_operations import LinkedServicesOperations
+from ._machine_learning_service_operations import MachineLearningServiceOperations
+from ._notebooks_operations import NotebooksOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._code_containers_operations import CodeContainersOperations
+from ._code_versions_operations import CodeVersionsOperations
+from ._component_containers_operations import ComponentContainersOperations
+from ._component_versions_operations import ComponentVersionsOperations
+from ._data_containers_operations import DataContainersOperations
+from ._datastores_operations import DatastoresOperations
+from ._data_versions_operations import DataVersionsOperations
+from ._environment_containers_operations import EnvironmentContainersOperations
+from ._environment_specification_versions_operations import EnvironmentSpecificationVersionsOperations
+from ._jobs_operations import JobsOperations
+from ._labeling_jobs_operations import LabelingJobsOperations
+from ._model_containers_operations import ModelContainersOperations
+from ._model_versions_operations import ModelVersionsOperations
+from ._online_deployments_operations import OnlineDeploymentsOperations
+from ._online_endpoints_operations import OnlineEndpointsOperations
+
# Public surface of the aio.operations package; kept in sync with the imports above.
__all__ = [
    'Operations',
    'WorkspacesOperations',
    'WorkspaceFeaturesOperations',
    'UsagesOperations',
    'VirtualMachineSizesOperations',
    'QuotasOperations',
    'MachineLearningComputeOperations',
    'AzureMachineLearningWorkspacesOperationsMixin',
    'PrivateEndpointConnectionsOperations',
    'PrivateLinkResourcesOperations',
    'LinkedServicesOperations',
    'MachineLearningServiceOperations',
    'NotebooksOperations',
    'WorkspaceConnectionsOperations',
    'CodeContainersOperations',
    'CodeVersionsOperations',
    'ComponentContainersOperations',
    'ComponentVersionsOperations',
    'DataContainersOperations',
    'DatastoresOperations',
    'DataVersionsOperations',
    'EnvironmentContainersOperations',
    'EnvironmentSpecificationVersionsOperations',
    'JobsOperations',
    'LabelingJobsOperations',
    'ModelContainersOperations',
    'ModelVersionsOperations',
    'OnlineDeploymentsOperations',
    'OnlineEndpointsOperations',
]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py
new file mode 100644
index 00000000000..303297806ce
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py
@@ -0,0 +1,89 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class AzureMachineLearningWorkspacesOperationsMixin:
    """Operations exposed directly on the client rather than via an operation group."""

    def list_skus(
        self,
        **kwargs
    ) -> AsyncIterable["models.SkuListResult"]:
        """Lists all skus with associated features.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SkuListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SkuListResult"]
        # Map auth/not-found/conflict statuses to specific exception types;
        # callers may extend or override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation's metadata template.
                url = self.list_skus.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation pages: the service hands back a complete next_link,
                # so no additional query parameters are appended.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, page items).
            deserialized = self._deserialize('SkuListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Deserialize the service's error envelope, then raise either the
                # mapped exception (via map_error) or a generic HttpResponseError.
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py
new file mode 100644
index 00000000000..5bc08d27b27
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class CodeContainersOperations:
+    """CodeContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client, service configuration and (de)serializers are
+        # injected by the generated service client that owns this group.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.CodeContainerResource",
+        **kwargs
+    ) -> "models.CodeContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.CodeContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CodeContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
+        # 401/404/409 map to typed azure-core exceptions; callers may extend
+        # or override the mapping through the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Container names are validated client-side against the service naming rules.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'CodeContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # PUT can answer 200 (updated) or 201 (created); both carry the resource body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('CodeContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('CodeContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.CodeContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CodeContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # NOTE(review): unlike create_or_update, 'name' is not validated
+            # against the container-name pattern here (as generated).
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('CodeContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.CodeContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.CodeContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the operation metadata.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Continuation pages: next_link is a complete URL supplied by the
+                # service, so no query parameters are re-applied.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # One page of results: return (continuation link or None, items).
+            deserialized = self._deserialize('CodeContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Error body is deserialized before map_error so it can be
+                # attached to the HttpResponseError raised below.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py
new file mode 100644
index 00000000000..57580efc78f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class CodeVersionsOperations:
+    """CodeVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client, service configuration and (de)serializers are
+        # injected by the generated service client that owns this group.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.CodeVersionResource",
+        **kwargs
+    ) -> "models.CodeVersionResource":
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.CodeVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CodeVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
+        # 401/404/409 map to typed azure-core exceptions; callers may extend
+        # or override the mapping through the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Container names are validated client-side against the service naming rules.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'CodeVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # PUT can answer 200 (updated) or 201 (created); both carry the resource body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.CodeVersionResource":
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CodeVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # NOTE(review): unlike create_or_update, 'name' is not validated
+            # against the container-name pattern here (as generated).
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.CodeVersionResourceArmPaginatedResult"]:
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.CodeVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the operation metadata and
+                # apply the optional OData-style query options.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Continuation pages: next_link is a complete URL supplied by the
+                # service, so no query parameters are re-applied.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # One page of results: return (continuation link or None, items).
+            deserialized = self._deserialize('CodeVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Error body is deserialized before map_error so it can be
+                # attached to the HttpResponseError raised below.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_containers_operations.py
new file mode 100644
index 00000000000..6faaa3806b9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ComponentContainersOperations:
+    """ComponentContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias the generated models module on the class so callers can reach model types
+    # through the operation group itself.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ComponentContainerResource",
+        **kwargs
+    ) -> "models.ComponentContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ComponentContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComponentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComponentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResource"]
+        # Map well-known failure codes to typed azure-core exceptions; callers may
+        # extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ComponentContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Non-success responses raise a typed error (via error_map) or an
+        # HttpResponseError carrying the deserialized service error model.
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the resource in the body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ComponentContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ComponentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.ComponentContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComponentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComponentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComponentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 and 204 are both accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.ComponentContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ComponentContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Build the GET request for a page: the first page is constructed from the URL
+        # template and query parameters; later pages reuse the service-supplied
+        # next_link URL verbatim (it already embeds the query string).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Deserialize one page into (continuation token, async iterable of items).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('ComponentContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetch a single page through the pipeline and raise on non-200 responses.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_versions_operations.py
new file mode 100644
index 00000000000..ab63de0e4cd
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_component_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ComponentVersionsOperations:
+    """ComponentVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias the generated models module on the class so callers can reach model types
+    # through the operation group itself.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ComponentVersionResource",
+        **kwargs
+    ) -> "models.ComponentVersionResource":
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ComponentVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComponentVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComponentVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResource"]
+        # Map well-known failure codes to typed azure-core exceptions; callers may
+        # extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ComponentVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Non-success responses raise a typed error (via error_map) or an
+        # HttpResponseError carrying the deserialized service error model.
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the resource in the body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ComponentVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ComponentVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.ComponentVersionResource":
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComponentVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComponentVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComponentVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 and 204 are both accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.ComponentVersionResourceArmPaginatedResult"]:
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ComponentVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Build the GET request for a page: the first page is constructed from the URL
+        # template and query parameters; later pages reuse the service-supplied
+        # next_link URL verbatim (it already embeds the query string).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Deserialize one page into (continuation token, async iterable of items).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('ComponentVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetch a single page through the pipeline and raise on non-200 responses.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py
new file mode 100644
index 00000000000..9056b313c04
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataContainersOperations:
+    """DataContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Expose the models module as a class attribute so callers can reach the
+    # request/response types from the operations group itself.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client and msrest (de)serializers are injected by the
+        # generated service client; config carries subscription_id used below.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DataContainerResource",
+        **kwargs
+    ) -> "models.DataContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
+        # Map well-known ARM status codes to typed exceptions; callers can
+        # extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Service-side naming rule: alphanumeric start, then up to 254
+            # alphanumeric/'-'/'_' characters; rejected client-side if violated.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DataContainerResource')
+        body_content_kwargs['content'] = body_content
+        # PUT is the ARM create-or-update verb: 200 = updated, 201 = created.
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both success codes carry the same resource shape in the body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DataContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 and 204 are both treated as successful deletion; no body expected.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.DataContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DataContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the route template and attach
+                # api-version plus the optional caller-supplied $skiptoken.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link is an absolute URL from the
+                # service that already embeds the continuation parameters.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Split one page into (continuation token, items) for AsyncItemPaged.
+            deserialized = self._deserialize('DataContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Error body is deserialized before map_error so the model is
+                # available for the HttpResponseError raised below.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py
new file mode 100644
index 00000000000..b8f2b3d5f2a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataVersionsOperations:
+    """DataVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Expose the models module as a class attribute so callers can reach the
+    # request/response types from the operations group itself.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client and msrest (de)serializers are injected by the
+        # generated service client; config carries subscription_id used below.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DataVersionResource",
+        **kwargs
+    ) -> "models.DataVersionResource":
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        # Map well-known ARM status codes to typed exceptions; callers can
+        # extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Container-name rule enforced client-side: alphanumeric start,
+            # then up to 254 alphanumeric/'-'/'_' characters.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DataVersionResource')
+        body_content_kwargs['content'] = body_content
+        # PUT is the ARM create-or-update verb: 200 = updated, 201 = created.
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both success codes carry the same resource shape in the body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DataVersionResource":
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 and 204 are both treated as successful deletion; no body expected.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.DataVersionResourceArmPaginatedResult"]:
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DataVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DataVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the route template and attach
+                # the optional $orderBy/$top/$skiptoken query options.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link is an absolute URL from the
+                # service that already embeds the continuation parameters.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Split one page into (continuation token, items) for AsyncItemPaged.
+            deserialized = self._deserialize('DataVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Error body is deserialized before map_error so the model is
+                # available for the HttpResponseError raised below.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py
new file mode 100644
index 00000000000..8328e9ca182
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py
@@ -0,0 +1,423 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+# Optional per-call hook passed via the `cls` kwarg: it receives (pipeline response,
+# deserialized body, response headers) and its return value replaces the method's result.
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DatastoresOperations:
+    """DatastoresOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated model types through the
+    # operation group itself (the :ivar models: documented above).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # All four collaborators are injected by the generated client; construction
+        # performs no I/O.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        count: Optional[int] = 30,
+        is_default: Optional[bool] = None,
+        names: Optional[List[str]] = None,
+        search_text: Optional[str] = None,
+        order_by: Optional[str] = None,
+        order_by_asc: Optional[bool] = False,
+        **kwargs
+    ) -> AsyncIterable["models.DatastorePropertiesResourceArmPaginatedResult"]:
+        """List datastores.
+
+        List datastores.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param count: Maximum number of results to return.
+        :type count: int
+        :param is_default: Filter down to the workspace default datastore.
+        :type is_default: bool
+        :param names: Names of datastores to return.
+        :type names: list[str]
+        :param search_text: Text to search for in the datastore names.
+        :type search_text: str
+        :param order_by: Order by property (createdtime | modifiedtime | name).
+        :type order_by: str
+        :param order_by_asc: Order by property in ascending order.
+        :type order_by_asc: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DatastorePropertiesResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DatastorePropertiesResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Builds either the first-page request (templated URL plus full query string) or a
+        # follow-up request taken directly from the service-supplied next_link.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters; optional filters are only sent when explicitly provided.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+                if is_default is not None:
+                    query_parameters['isDefault'] = self._serialize.query("is_default", is_default, 'bool')
+                if names is not None:
+                    query_parameters['names'] = self._serialize.query("names", names, '[str]')
+                if search_text is not None:
+                    query_parameters['searchText'] = self._serialize.query("search_text", search_text, 'str')
+                if order_by is not None:
+                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if order_by_asc is not None:
+                    query_parameters['orderByAsc'] = self._serialize.query("order_by_asc", order_by_asc, 'bool')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is used verbatim with an empty query dict; presumably it already
+                # embeds the continuation query string -- TODO confirm against the service.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Turns one HTTP page into (continuation token or None, async iterable of items).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('DatastorePropertiesResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetches and validates a single page; AsyncItemPaged drives this coroutine until
+        # extract_data reports no further next_link.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): here the error body is deserialized *before* map_error, which
+                # may raise a typed azure.core error that never sees `error`; the non-paged
+                # operations in this file perform these two steps in the opposite order.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete datastore.
+
+        Delete datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Hand the raw response to a caller-supplied hook, if any; otherwise return None.
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DatastorePropertiesResource":
+        """Get datastore.
+
+        Get datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only accepted success status; anything else is mapped or raised as an error.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DatastorePropertiesResource",
+        **kwargs
+    ) -> "models.DatastorePropertiesResource":
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Datastore entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # The name is validated client-side against the ARM naming pattern; NOTE(review):
+            # get/delete in this class send the same path segment without this pattern check.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DatastorePropertiesResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both success codes carry the same resource body (200 update, 201 create).
+        if response.status_code == 200:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    async def list_secrets(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DatastoreCredentials":
+        """Get datastore secrets.
+
+        Get datastore secrets.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastoreCredentials, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastoreCredentials"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_secrets.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # This is a POST action endpoint (.../listSecrets): the response body contains the
+        # datastore's credentials, so callers should avoid logging the returned object.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastoreCredentials', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py
new file mode 100644
index 00000000000..4b82f5c4dc2
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+# Optional per-call hook passed via the `cls` kwarg: it receives (pipeline response,
+# deserialized body, response headers) and its return value replaces the method's result.
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentContainersOperations:
+    """EnvironmentContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated model types through the
+    # operation group itself (the :ivar models: documented above).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # All four collaborators are injected by the generated client; construction
+        # performs no I/O.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.EnvironmentContainerResource",
+        **kwargs
+    ) -> "models.EnvironmentContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # The name is validated client-side against the ARM naming pattern; NOTE(review):
+            # get/delete in this class send the same path segment without this pattern check.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both success codes carry the same resource body (200 update, 201 create).
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EnvironmentContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only accepted success status; anything else is mapped or raised as an error.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Hand the raw response to a caller-supplied hook, if any; otherwise return None.
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.EnvironmentContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.EnvironmentContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Builds either the first-page request (templated URL plus query string) or a
+        # follow-up request taken directly from the service-supplied next_link.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is used verbatim with an empty query dict; presumably it already
+                # embeds the continuation query string -- TODO confirm against the service.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Turns one HTTP page into (continuation token or None, async iterable of items).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetches and validates a single page; AsyncItemPaged drives this coroutine until
+        # extract_data reports no further next_link.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): here the error body is deserialized *before* map_error, which
+                # may raise a typed azure.core error that never sees `error`; the non-paged
+                # operations in this file perform these two steps in the opposite order.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py
new file mode 100644
index 00000000000..e3adfc35b22
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentSpecificationVersionsOperations:
+    """EnvironmentSpecificationVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the pipeline client and (de)serializers supplied by the service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.EnvironmentSpecificationVersionResource",
+        **kwargs
+    ) -> "models.EnvironmentSpecificationVersionResource":
+        """Creates or updates an EnvironmentSpecificationVersion.
+
+        Creates or updates an EnvironmentSpecificationVersion.
+
+        :param name: Name of EnvironmentSpecificationVersion.
+        :type name: str
+        :param version: Version of EnvironmentSpecificationVersion.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Definition of EnvironmentSpecificationVersion.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        # Map well-known ARM status codes onto typed azure-core exceptions; callers may extend via kwargs.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        # 'name' is validated client-side against the ARM naming pattern before the call is made.
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentSpecificationVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the full resource payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EnvironmentSpecificationVersionResource":
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Single GET; non-200 responses are mapped to typed exceptions via error_map.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletions (204 carries no body).
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]:
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentSpecificationVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Builds the request for either the first page or a continuation page.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is the fully-formed URL from the previous page; no query parameters are re-added.
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py
new file mode 100644
index 00000000000..79c643a30cb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py
@@ -0,0 +1,468 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class JobsOperations:
+ """JobsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the pipeline client and (de)serializers supplied by the service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create_or_update(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.JobBaseResource",
+        **kwargs
+    ) -> "models.JobBaseResource":
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Job definition object.
+        :type body: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResource"]
+        # Map well-known ARM status codes onto typed azure-core exceptions; callers may extend via kwargs.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        # 'id' is validated client-side against the ARM naming pattern before the call is made.
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'JobBaseResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the full resource payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def get(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.JobBaseResource":
+        """Gets a Job by name/id.
+
+        Gets a Job by name/id.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Single GET; non-200 responses are mapped to typed exceptions via error_map.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def _delete_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Perform the initial DELETE call of the long-running delete operation.
+
+        Returns ``None``; on a 202 response the polling headers
+        (``Location`` / ``Retry-After``) are surfaced through the optional
+        ``cls`` callback for the poller in ``begin_delete``.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 202 Accepted: capture the headers the LRO poller needs to track progress.
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def begin_delete(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Deletes a Job.
+
+        Deletes a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        # When resuming from a continuation token, the initial DELETE call is skipped.
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Passed to the ARM poller so it can format the polling URL from these path values.
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        job_type: Optional[str] = None,
+        tags: Optional[str] = None,
+        tag: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.JobBaseResourceArmPaginatedResult"]:
+        """Lists Jobs in the workspace.
+
+        Lists Jobs in the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param job_type: Type of job to be returned.
+        :type job_type: str
+        :param tags: Tags for job to be returned.
+        :type tags: str
+        :param tag: Jobs returned will have this tag key.
+        :type tag: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.JobBaseResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Builds the request for either the first page or a continuation page.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if job_type is not None:
+                    query_parameters['jobType'] = self._serialize.query("job_type", job_type, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if tag is not None:
+                    query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is the fully-formed URL from the previous page; no query parameters are re-added.
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('JobBaseResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs'} # type: ignore
+
+    async def cancel(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Cancels a Job.
+
+        Cancels a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        # Map 401/404/409 to the typed azure-core exceptions; callers may extend or
+        # override this mapping through the 'error_map' keyword argument.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.cancel.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Cancel is a POST with no request body; only a 200 response counts as success.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py
new file mode 100644
index 00000000000..3b66e2adbde
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py
@@ -0,0 +1,739 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class LabelingJobsOperations:
+ """LabelingJobsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        """Store the pipeline client, client configuration, and msrest serializer /
+        deserializer supplied by the service client that instantiates this group."""
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def _create_or_update_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.LabelingJobResource",
+        **kwargs
+    ) -> "models.LabelingJobResource":
+        """Send the initial PUT of the create-or-update long-running operation.
+
+        ``begin_create_or_update`` wraps this call with an AsyncLROPoller; a 201
+        response carries the Azure-AsyncOperation header the poller follows.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # 'id' is validated against the service's naming pattern before substitution.
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'LabelingJobResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 200: update completed synchronously. 201: created; the Azure-AsyncOperation
+        # header points at the operation-status endpoint for polling.
+        if response.status_code == 200:
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if response.status_code == 201:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.LabelingJobResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.LabelingJobResource"]:
+        """Creates or updates a labeling job.
+
+        Creates or updates a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: LabelingJob definition object.
+        :type body: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either LabelingJobResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.LabelingJobResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x makes the initial call return the raw PipelineResponse,
+            # which the poller needs to follow the LRO.
+            raw_result = await self._create_or_update_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final response once polling completes.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # This LRO resolves its final state via the Azure-AsyncOperation header.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def get(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        include_job_instructions: Optional[bool] = None,
+        include_label_categories: Optional[bool] = None,
+        **kwargs
+    ) -> "models.LabelingJobResource":
+        """Gets a labeling job by name/id.
+
+        Gets a labeling job by name/id.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param include_job_instructions: Boolean value to indicate whether to include JobInstructions
+         in response.
+        :type include_job_instructions: bool
+        :param include_label_categories: Boolean value to indicate Whether to include LabelCategories
+         in response.
+        :type include_label_categories: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LabelingJobResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        # The two boolean flags are optional and only sent when explicitly provided.
+        if include_job_instructions is not None:
+            query_parameters['includeJobInstructions'] = self._serialize.query("include_job_instructions", include_job_instructions, 'bool')
+        if include_label_categories is not None:
+            query_parameters['includeLabelCategories'] = self._serialize.query("include_label_categories", include_label_categories, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def delete(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete a labeling job.
+
+        Delete a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 means the job was already absent; both 200 and 204 are success.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        count: Optional[int] = None,
+        **kwargs
+    ) -> AsyncIterable["models.LabelingJobResourceArmPaginatedResult"]:
+        """Lists labeling jobs in the workspace.
+
+        Lists labeling jobs in the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param count: Number of labeling jobs to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.LabelingJobResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL and query string from scratch.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: the service-supplied next_link already embeds
+                # all query parameters, so none are re-added here.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Split a page into (continuation link, iterable of items) for AsyncItemPaged.
+            deserialized = self._deserialize('LabelingJobResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs'}  # type: ignore
+
+    async def pause(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Pause a labeling job.
+
+        Pause a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.pause.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Pause is a synchronous POST with no request body (unlike resume, which is an LRO).
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    pause.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause'}  # type: ignore
+
+    async def _resume_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Send the initial POST of the resume long-running operation.
+
+        ``begin_resume`` wraps this call with an AsyncLROPoller; a 202 response
+        carries the Location/Retry-After headers the poller follows.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._resume_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 202: accepted for asynchronous processing; capture the polling headers.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _resume_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'}  # type: ignore
+
+    async def begin_resume(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Resume a labeling job.
+
+        Resume a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x makes the initial call return the raw PipelineResponse,
+            # which the poller needs to follow the LRO.
+            raw_result = await self._resume_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # The operation has no body to deserialize; without a custom cls this
+            # callback (and hence the poller's result) is None.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # This LRO resolves its final state via the Location header.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_resume.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'}  # type: ignore
+
+    async def _export_labels_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ExportSummary",
+        **kwargs
+    ) -> Optional["models.ExportSummary"]:
+        """Send the initial POST of the export-labels long-running operation.
+
+        Returns an ExportSummary when the service answers 200 synchronously, or
+        None on 202 (the poller then follows the Location/Retry-After headers).
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.ExportSummary"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._export_labels_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ExportSummary')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        # 200: export finished synchronously and the summary is in the body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+        # 202: deferred; capture the polling headers and leave deserialized as None.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _export_labels_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
+
+    async def begin_export_labels(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ExportSummary",
+        **kwargs
+    ) -> AsyncLROPoller["models.ExportSummary"]:
+        """Export labels from a labeling job.
+
+        Export labels from a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The export summary.
+        :type body: ~azure_machine_learning_workspaces.models.ExportSummary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ExportSummary]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExportSummary"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # First call (no saved state): issue the initial request. The 'cls'
+            # passthrough keeps the raw pipeline response for the poller instead
+            # of a deserialized model.
+            raw_result = await self._export_labels_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; drop them so they are
+        # not forwarded into the polling method below.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response into the final ExportSummary.
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # This operation reports its final state via the Location header.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume an earlier poller from its serialized continuation token.
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_export_labels.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_linked_services_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_linked_services_operations.py
new file mode 100644
index 00000000000..869bf4c927f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_linked_services_operations.py
@@ -0,0 +1,294 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class LinkedServicesOperations:
+    """LinkedServicesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the shared pipeline client, (de)serializers and client config.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.LinkedServiceList":
+        """List all linked services under an AML workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceList, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceList
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceList"]
+        # Map auth/not-found/conflict statuses to typed azure-core exceptions.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceList', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices'}  # type: ignore
+
+    async def create(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        link_name: str,
+        parameters: "models.LinkedServiceRequest",
+        **kwargs
+    ) -> "models.LinkedServiceResponse":
+        """Add a new linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :param parameters: The object for creating or updating a linked service.
+        :type parameters: ~azure_machine_learning_workspaces.models.LinkedServiceRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceResponse, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the request model and PUT it to the linked-service resource.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'LinkedServiceRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        link_name: str,
+        **kwargs
+    ) -> "models.LinkedServiceResponse":
+        """Get the detail of a linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceResponse, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
+
+    async def delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        link_name: str,
+        **kwargs
+    ) -> None:
+        """Delete a linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 and 204 both indicate successful deletion.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..cf6213abdd8
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,914 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations:
+ """MachineLearningComputeOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the shared pipeline client, (de)serializers and client config.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_workspace(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skiptoken: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.PaginatedComputeResourcesList"]:
+        """Gets computes in specified workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedComputeResourcesList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build either the first-page request (full URL + query parameters)
+            # or a follow-up request to the server-supplied next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_workspace.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is already a complete URL; no extra query parameters.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Pull one page's items and the link to the following page.
+            deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'}  # type: ignore
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> "models.ComputeResource":
+        """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+        not returned - use 'keys' nested resource to get them.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        # Map auth/not-found/conflict statuses to typed azure-core exceptions.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    async def _create_or_update_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        parameters: "models.ComputeResource",
+        **kwargs
+    ) -> "models.ComputeResource":
+        """Issue the initial PUT of the create-or-update long-running operation.
+
+        A 200 response means the update completed synchronously; a 201 means the
+        operation continues asynchronously and its status URL is carried in the
+        Azure-AsyncOperation response header.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the compute definition and PUT it to the compute resource.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ComputeResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if response.status_code == 201:
+            # Async create: expose the Azure-AsyncOperation status URL for polling.
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        parameters: "models.ComputeResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.ComputeResource"]:
+        """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+        that it does not exist yet.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Payload with Machine Learning compute definition.
+        :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._create_or_update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                # Return the raw pipeline response so the poller can drive the LRO itself.
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        # Callback run on the final polling response: deserialize it into ComputeResource
+        # (or hand response, model and headers to the caller-supplied ``cls``).
+        def get_long_running_output(pipeline_response):
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        # Forwarded to AsyncARMPolling as path_format_arguments for URL formatting during polling.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    async def _update_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        parameters: "models.ClusterUpdateParameters",
+        **kwargs
+    ) -> "models.ComputeResource":
+        """Send the initial PATCH request of the update long-running operation.
+
+        Serializes ``parameters`` as ClusterUpdateParameters, issues the PATCH and
+        returns the deserialized ComputeResource (or the result of ``cls`` when
+        provided). Any status other than 200 raises HttpResponseError.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    async def begin_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        parameters: "models.ClusterUpdateParameters",
+        **kwargs
+    ) -> AsyncLROPoller["models.ComputeResource"]:
+        """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Additional parameters for cluster update.
+        :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                # Return the raw pipeline response so the poller can drive the LRO itself.
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        # Callback run on the final polling response: deserialize it into ComputeResource.
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        # Forwarded to AsyncARMPolling as path_format_arguments for URL formatting during polling.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    async def _delete_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+        **kwargs
+    ) -> None:
+        """Send the initial DELETE request of the delete long-running operation.
+
+        On a 202 (accepted) response the ``Azure-AsyncOperation`` and ``Location``
+        headers are captured and passed to ``cls`` when provided; the method itself
+        returns None. Statuses other than 200/202 raise HttpResponseError.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    async def begin_delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Deletes specified Machine Learning compute.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+         underlying compute from workspace if 'Detach'.
+        :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                underlying_resource_action=underlying_resource_action,
+                # Return the raw pipeline response so the poller can drive the LRO itself.
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        # Delete produces no body to deserialize: forward to ``cls`` when provided,
+        # otherwise the callback (and thus the poller result) is None.
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Forwarded to AsyncARMPolling as path_format_arguments for URL formatting during polling.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def list_nodes(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> AsyncIterable["models.AmlComputeNodesInformation"]:
+        """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Build the request: the first page is a POST to the listNodes action;
+        # subsequent pages are plain GETs on the service-provided next_link.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_nodes.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                    'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.post(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Extract one page: returns (next_link, async iterable of node entries).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+            list_of_elem = deserialized.nodes
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): unlike the other operations in this file, the error body
+                # is deserialized before map_error, so ``error`` goes unused whenever
+                # map_error raises a mapped exception. Generated code — left as-is.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
+
+    async def list_keys(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> "models.ComputeSecrets":
+        """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeSecrets, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # listKeys is an ARM POST action with no request body.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
+
+    async def start(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> None:
+        """Posts a start action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.start.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # start is an ARM POST action with no request body; only a 200 is accepted.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+    async def stop(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> None:
+        """Posts a stop action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.stop.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # stop is an ARM POST action with no request body; only a 200 is accepted.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+    async def restart(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        **kwargs
+    ) -> None:
+        """Posts a restart action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.restart.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # restart is an ARM POST action with no request body; only a 200 is accepted.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py
new file mode 100644
index 00000000000..f3d51a3bb7e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py
@@ -0,0 +1,435 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningServiceOperations:
+ """MachineLearningServiceOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skiptoken: Optional[str] = None,
+ model_id: Optional[str] = None,
+ model_name: Optional[str] = None,
+ tag: Optional[str] = None,
+ tags: Optional[str] = None,
+ properties: Optional[str] = None,
+ run_id: Optional[str] = None,
+ expand: Optional[bool] = None,
+ orderby: Optional[Union[str, "models.OrderString"]] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedServiceList"]:
+ """Gets services in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :param model_id: The Model Id.
+ :type model_id: str
+ :param model_name: The Model name.
+ :type model_name: str
+ :param tag: The object tag.
+ :type tag: str
+ :param tags: A set of tags with which to filter the returned services. It is a comma separated
+ string of tags key or tags key=value Example: tagKey1,tagKey2,tagKey3=value3.
+ :type tags: str
+ :param properties: A set of properties with which to filter the returned services. It is a
+ comma separated string of properties key and/or properties key=value Example:
+ propKey1,propKey2,propKey3=value3.
+ :type properties: str
+ :param run_id: runId for model associated with service.
+ :type run_id: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :param orderby: The option to order the response.
+ :type orderby: str or ~azure_machine_learning_workspaces.models.OrderString
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedServiceList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedServiceList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedServiceList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+ if model_id is not None:
+ query_parameters['modelId'] = self._serialize.query("model_id", model_id, 'str')
+ if model_name is not None:
+ query_parameters['modelName'] = self._serialize.query("model_name", model_name, 'str')
+ if tag is not None:
+ query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+ if tags is not None:
+ query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+ if properties is not None:
+ query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+ if run_id is not None:
+ query_parameters['runId'] = self._serialize.query("run_id", run_id, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+ if orderby is not None:
+ query_parameters['orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedServiceList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ expand: Optional[bool] = False,
+ **kwargs
+ ) -> "models.ServiceResource":
+ """Get a Service by name.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ServiceResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ServiceResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ **kwargs
+ ) -> None:
+ """Delete a specific Service..
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ properties: "models.CreateServiceRequest",
+ **kwargs
+ ) -> Optional["models.ServiceResource"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ServiceResource"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'CreateServiceRequest')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ properties: "models.CreateServiceRequest",
+ **kwargs
+ ) -> AsyncLROPoller["models.ServiceResource"]:
+ """Creates or updates service. This call will update a service if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new service, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param properties: The payload that is used to create or update the Service.
+ :type properties: ~azure_machine_learning_workspaces.models.CreateServiceRequest
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ServiceResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ properties=properties,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py
new file mode 100644
index 00000000000..8f820a4368e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py
@@ -0,0 +1,333 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelContainersOperations:
+ """ModelContainersOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skiptoken: Optional[str] = None,
+ count: Optional[int] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ModelContainerResourceArmPaginatedResult"]:
+ """List model containers.
+
+ List model containers.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :param count: Maximum number of results to return.
+ :type count: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ModelContainerResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+ if count is not None:
+ query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ModelContainerResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'} # type: ignore
+
+ async def create_or_update(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.ModelContainerResource",
+ **kwargs
+ ) -> "models.ModelContainerResource":
+ """Create or update container.
+
+ Create or update container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Container entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelContainerResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'ModelContainerResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+ async def get(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ModelContainerResource":
+ """Get container.
+
+ Get container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelContainerResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+ async def delete(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Delete container.
+
+ Delete container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py
new file mode 100644
index 00000000000..9ee36e6a0fd
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py
@@ -0,0 +1,381 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelVersionsOperations:
+ """ModelVersionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ skiptoken: Optional[str] = None,
+ order_by: Optional[str] = None,
+ top: Optional[int] = None,
+ version: Optional[str] = None,
+ description: Optional[str] = None,
+ offset: Optional[int] = None,
+ tags: Optional[str] = None,
+ properties: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ModelVersionResourceArmPaginatedResult"]:
+ """List model versions.
+
+ List model versions.
+
+ :param name: Model name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param version: Model version.
+ :type version: str
+ :param description: Model description.
+ :type description: str
+ :param offset: Number of initial results to skip.
+ :type offset: int
+ :param tags: Comma-separated list of tag names (and optionally values). Example:
+ tag1,tag2=value2.
+ :type tags: str
+ :param properties: Comma-separated list of property names (and optionally values). Example:
+ prop1,prop2=value2.
+ :type properties: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ModelVersionResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelVersionResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if version is not None:
+ query_parameters['version'] = self._serialize.query("version", version, 'str')
+ if description is not None:
+ query_parameters['description'] = self._serialize.query("description", description, 'str')
+ if offset is not None:
+ query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
+ if tags is not None:
+ query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+ if properties is not None:
+ query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ModelVersionResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions'} # type: ignore
+
+ async def create_or_update(
+ self,
+ name: str,
+ version: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.ModelVersionResource",
+ **kwargs
+ ) -> "models.ModelVersionResource":
+ """Create or update version.
+
+ Create or update version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Version entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.ModelVersionResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'ModelVersionResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} # type: ignore
+
+ async def get(
+ self,
+ name: str,
+ version: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ModelVersionResource":
+ """Get version.
+
+ Get version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} # type: ignore
+
+ async def delete(
+ self,
+ name: str,
+ version: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Delete version.
+
+ Delete version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..37a6174a37e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
@@ -0,0 +1,219 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations:
+ """NotebooksOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def _prepare_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> Optional["models.NotebookResourceInfo"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def begin_prepare(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller["models.NotebookResourceInfo"]:
+        """Prepare the notebook resource for the given Azure ML workspace (long-running operation).
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._prepare_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListNotebookKeysResult":
+        """List the access keys of the notebook resource for the given Azure ML workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py
new file mode 100644
index 00000000000..a5f84d4a0fc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py
@@ -0,0 +1,714 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineDeploymentsOperations:
+    """OnlineDeploymentsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated model types
+    # (e.g. OnlineDeploymentTrackedResource) through the operation group.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # serializer for URL/query/header/body values
+        self._deserialize = deserializer  # deserializer for response payloads
+        self._config = config  # carries subscription_id, polling_interval, etc.
+ def list(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ order_by: Optional[str] = None,
+ top: Optional[int] = None,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]:
+ """List Inference Endpoint Deployments.
+
+ List Inference Endpoint Deployments.
+
+ :param endpoint_name: Inference endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Top of list.
+ :type top: int
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('OnlineDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments'} # type: ignore
+
+    async def _delete_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Send the initial DELETE request of the delete-deployment LRO.
+
+        Returns nothing on success; a 202 response carries Location/Retry-After
+        headers that the poller in :meth:`begin_delete` uses to track progress.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 202 Accepted means the delete continues asynchronously; surface the
+        # polling headers so the LRO machinery can follow the operation.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def begin_delete(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Delete Inference Endpoint Deployment.
+
+        Delete Inference Endpoint Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda keeps the raw PipelineResponse so the poller can read
+            # the LRO headers set by the initial call.
+            raw_result = await self._delete_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete yields no body; only forward the final response to cls.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Delete tracks completion via the Location header ('final-state-via': 'location').
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+ async def get(
+ self,
+ endpoint_name: str,
+ deployment_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.OnlineDeploymentTrackedResource":
+ """Get Inference Deployment Deployment.
+
+ Get Inference Deployment Deployment.
+
+ :param endpoint_name: Inference endpoint name.
+ :type endpoint_name: str
+ :param deployment_name: Inference Endpoint Deployment name.
+ :type deployment_name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OnlineDeploymentTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def _create_or_update_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.OnlineDeploymentTrackedResource",
+        **kwargs
+    ) -> "models.OnlineDeploymentTrackedResource":
+        """Send the initial PUT request of the create-or-update LRO.
+
+        200 returns the updated resource directly; 201 additionally carries an
+        Azure-AsyncOperation header that :meth:`begin_create_or_update` polls.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        # Note: unlike GET/DELETE, the PUT path enforces the service's name
+        # pattern on endpoint and deployment names client-side.
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'OnlineDeploymentTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 201:
+            # 201 Created: the operation continues asynchronously; expose the
+            # Azure-AsyncOperation URL for the poller.
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.OnlineDeploymentTrackedResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.OnlineDeploymentTrackedResource"]:
+        """Create or update Inference Endpoint Deployment.
+
+        Create or update Inference Endpoint Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Inference Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda keeps the raw PipelineResponse so the poller can read
+            # the LRO headers set by the initial PUT.
+            raw_result = await self._create_or_update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final resource and surface the async-operation header.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Create/update tracks completion via the Azure-AsyncOperation header.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def _update_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineDeploymentPartialTrackedResource",
+        **kwargs
+    ) -> Optional["models.OnlineDeploymentTrackedResource"]:
+        """Send the initial PATCH request of the update LRO.
+
+        200 returns the updated resource; 202 returns None and carries
+        Location/Retry-After headers that :meth:`begin_update` polls.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OnlineDeploymentTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineDeploymentPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        # 202 Accepted: no body yet; the poller follows the Location header.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def begin_update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineDeploymentPartialTrackedResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.OnlineDeploymentTrackedResource"]:
+        """Update Online Deployment.
+
+        Update Online Deployment.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineDeploymentPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda keeps the raw PipelineResponse so the poller can read
+            # the LRO headers set by the initial PATCH.
+            raw_result = await self._update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final updated resource from the terminal response.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Update tracks completion via the Location header ('final-state-via': 'location').
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def get_logs(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DeploymentLogsRequest",
+        **kwargs
+    ) -> "models.DeploymentLogs":
+        """Polls an Endpoint operation.
+
+        Polls an Endpoint operation.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The name and identifier for the endpoint.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The request containing parameters for retrieving logs.
+        :type body: ~azure_machine_learning_workspaces.models.DeploymentLogsRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeploymentLogs, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DeploymentLogs
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DeploymentLogs"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_logs.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # getLogs is a POST action: the filter criteria travel in the request body.
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DeploymentLogsRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeploymentLogs', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py
new file mode 100644
index 00000000000..960b64c0c02
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py
@@ -0,0 +1,894 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class OnlineEndpointsOperations:
    """OnlineEndpointsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Expose the generated model classes through the operation group so callers
    # can reach them as ``client.online_endpoints.models``.
    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        # msrest-style serializer/deserializer pair for request bodies and responses.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service-client configuration (subscription id, polling interval, ...).
        self._config = config
+
    def list(
        self,
        resource_group_name: str,
        workspace_name: str,
        name: Optional[str] = None,
        count: Optional[int] = None,
        compute_type: Optional[Union[str, "models.EndpointComputeType"]] = None,
        skiptoken: Optional[str] = None,
        tags: Optional[str] = None,
        properties: Optional[str] = None,
        order_by: Optional[Union[str, "models.OrderString"]] = None,
        **kwargs
    ) -> AsyncIterable["models.OnlineEndpointTrackedResourceArmPaginatedResult"]:
        """List Online Endpoints.

        List Online Endpoints.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param name: Name of the endpoint.
        :type name: str
        :param count: Number of endpoints to be retrieved in a page of results.
        :type count: int
        :param compute_type: EndpointComputeType to be filtered by.
        :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :param tags: A set of tags with which to filter the returned models. It is a comma separated
         string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .
        :type tags: str
        :param properties: A set of properties with which to filter the returned models. It is a comma
         separated string of properties key and/or properties key=value Example:
         propKey1,propKey2,propKey3=value3 .
        :type properties: str
        :param order_by: The option to order the response.
        :type order_by: str or ~azure_machine_learning_workspaces.models.OrderString
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the GET request for either the first page (templated URL plus
            # optional filter query parameters) or a continuation page, where
            # next_link already carries the full URL and query string.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters; optional filters are sent only when supplied.
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if name is not None:
                    query_parameters['name'] = self._serialize.query("name", name, 'str')
                if count is not None:
                    query_parameters['count'] = self._serialize.query("count", count, 'int')
                if compute_type is not None:
                    query_parameters['computeType'] = self._serialize.query("compute_type", compute_type, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
                if tags is not None:
                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
                if properties is not None:
                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
                if order_by is not None:
                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand (next_link, items) to the pager.
            deserialized = self._deserialize('OnlineEndpointTrackedResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; raise on any non-200 response.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints'}  # type: ignore
+
+ async def _delete_initial(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
    async def begin_delete(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Delete Online Endpoint.

        Delete Online Endpoint.

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial DELETE when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._delete_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
                **kwargs
            )

        # These kwargs were consumed by the initial request; the polling
        # requests must not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deletion returns no body; only invoke the caller's response hook if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # This LRO completes via the Location header (final-state-via: location).
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an existing operation from its saved continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+ async def get(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.OnlineEndpointTrackedResource":
+ """Get Online Endpoint.
+
+ Get Online Endpoint.
+
+ :param endpoint_name: Online Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OnlineEndpointTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.OnlineEndpointTrackedResource",
+ **kwargs
+ ) -> "models.OnlineEndpointTrackedResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'OnlineEndpointTrackedResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
    async def begin_create_or_update(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.OnlineEndpointTrackedResource",
        **kwargs
    ) -> AsyncLROPoller["models.OnlineEndpointTrackedResource"]:
        """Create or update Online Endpoint.

        Create or update Online Endpoint.

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Online Endpoint entity to apply during operation.
        :type body: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PUT when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
                **kwargs
            )

        # These kwargs were consumed by the initial request; the polling
        # requests must not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final resource and surface the async-operation header.
            response_headers = {}
            response = pipeline_response.http_response
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, response_headers)
            return deserialized

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # This LRO completes via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an existing operation from its saved continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+ async def _update_initial(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.PartialOnlineEndpointPartialTrackedResource",
+ **kwargs
+ ) -> Optional["models.OnlineEndpointTrackedResource"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OnlineEndpointTrackedResource"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'PartialOnlineEndpointPartialTrackedResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+ if response.status_code == 202:
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
    async def begin_update(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.PartialOnlineEndpointPartialTrackedResource",
        **kwargs
    ) -> AsyncLROPoller["models.OnlineEndpointTrackedResource"]:
        """Update Online Endpoint.

        Update Online Endpoint.

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Online Endpoint entity to apply during operation.
        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineEndpointPartialTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PATCH when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._update_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
                **kwargs
            )

        # These kwargs were consumed by the initial request; the polling
        # requests must not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final updated resource from the terminal response.
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # This LRO completes via the Location header (final-state-via: location).
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an existing operation from its saved continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+ async def _regenerate_keys_initial(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.RegenerateEndpointKeysRequest",
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._regenerate_keys_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'RegenerateEndpointKeysRequest')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _regenerate_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} # type: ignore
+
+    async def begin_regenerate_keys(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.RegenerateEndpointKeysRequest",
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: RegenerateKeys request.
+        :type body: ~azure_machine_learning_workspaces.models.RegenerateEndpointKeysRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # Fresh LRO: issue the initial POST.  The cls lambda makes the
+            # initial call return the raw PipelineResponse so the poller can
+            # read the LRO headers (Location / Retry-After) from it.
+            raw_result = await self._regenerate_keys_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; remove them so they
+        # are not forwarded into the polling method below.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Final deserialization step for the poller.  The operation has no
+            # response body, so only a custom ``cls`` callback can yield a
+            # value; otherwise the poller resolves to None.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Required by AsyncARMPolling to expand templated segments in the
+        # Location polling URL.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # 202 responses complete via the Location header ('final-state-via').
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'},  path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume a previously-saved poller instead of starting a new LRO.
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'}  # type: ignore
+
+    async def list_keys(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EndpointAuthKeys":
+        """List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthKeys, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EndpointAuthKeys"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # A POST (not GET) is used: the /listKeys endpoint returns secrets.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys'}  # type: ignore
+
+    async def get_token(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EndpointAuthToken":
+        """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthToken, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthToken
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EndpointAuthToken"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_token.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # A POST (not GET) is used: the /token endpoint returns a credential.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthToken', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
new file mode 100644
index 00000000000..68329c27b65
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations:
+    """Operations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # All four collaborators are supplied by the generated service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        **kwargs
+    ) -> AsyncIterable["models.OperationListResult"]:
+        """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OperationListResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OperationListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build one GET request per page.  A continuation link already
+            # embeds its query string, so api-version is only added on the
+            # first request.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('OperationListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            # Continuation link is always None — presumably OperationListResult
+            # has no nextLink field, so this listing is single-page; confirm
+            # against the model definition.
+            return None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): unlike the non-paged operations, the error body
+                # is deserialized before map_error here, so mapped exceptions
+                # (401/404/409) are raised without the model attached.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..e7dcc71dcd0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,238 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations:
+    """PrivateEndpointConnectionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # All four collaborators are supplied by the generated service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        **kwargs
+    ) -> "models.PrivateEndpointConnection":
+        """Gets the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # This operation group reports failures as the generic ARM
+            # ErrorResponse (not MachineLearningServiceError).
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+
+    async def put(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        properties: "models.PrivateEndpointConnection",
+        **kwargs
+    ) -> "models.PrivateEndpointConnection":
+        """Update the state of specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :param properties: The private endpoint connection properties.
+        :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        # Callers may override the request content type via kwargs.
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.put.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the full PrivateEndpointConnection model as the PUT body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+
+    async def delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        **kwargs
+    ) -> None:
+        """Deletes the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 means the connection was already gone; both are success here.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..da29aa1910e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations:
+    """PrivateLinkResourcesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # All four collaborators are supplied by the generated service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def list_by_workspace(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.PrivateLinkResourceListResult":
+        """Gets the private link resources that need to be created for a workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateLinkResourceListResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateLinkResourceListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_by_workspace.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # NOTE(review): no error model is deserialized for this operation
+            # (the swagger presumably defines no default error response), so
+            # the exception carries only the raw HTTP response.
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
new file mode 100644
index 00000000000..daa5c1a71c0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
@@ -0,0 +1,175 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations:
+    """QuotasOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def update(
+        self,
+        location: str,
+        parameters: "models.QuotaUpdateParameters",
+        **kwargs
+    ) -> "models.UpdateWorkspaceQuotasResult":
+        """Update quota for each VM family in workspace.
+
+        :param location: The location for which the quota update is requested.
+        :type location: str
+        :param parameters: Quota update parameters.
+        :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),  # ARM location-name constraint
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)  # typed error body for unmapped status codes
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'}  # type: ignore
+
+    def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> AsyncIterable["models.ListWorkspaceQuotas"]:
+        """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceQuotas"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),  # ARM location-name constraint
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # next_link is a complete URL, query string included
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)  # None signals the final page
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
new file mode 100644
index 00000000000..38abd71c5bc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations:
+    """UsagesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> AsyncIterable["models.ListUsagesResult"]:
+        """Gets the current usage information as well as limits for AML resources for the given
+        subscription and location.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListUsagesResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),  # ARM location-name constraint
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # next_link is a complete URL, query string included
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)  # None signals the final page
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..b4e72f1d89c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations:
+    """VirtualMachineSizesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> "models.VirtualMachineSizeListResult":
+        """Returns supported VM Sizes in a location.
+
+        :param location: The location for which supported virtual machine sizes are queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: VirtualMachineSizeListResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.VirtualMachineSizeListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),  # ARM location-name constraint
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..e9bce3db9f3
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations:
+    """WorkspaceConnectionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        target: Optional[str] = None,
+        category: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]:
+        """List all connections under an AML workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param target: Target of the workspace connection.
+        :type target: str
+        :param category: Category of the workspace connection.
+        :type category: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if target is not None:  # optional server-side filters
+                    query_parameters['target'] = self._serialize.query("target", target, 'str')
+                if category is not None:
+                    query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # next_link is a complete URL, query string included
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return None, AsyncList(list_of_elem)  # always None: this operation never follows a next link, so only the first page is surfaced
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)  # NOTE(review): deserialized before map_error here, unlike the sibling operations below; the typed body is discarded if map_error raises
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'}  # type: ignore
+
+    async def create(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        parameters: "models.WorkspaceConnectionDto",
+        **kwargs
+    ) -> "models.WorkspaceConnection":
+        """Add a new workspace connection.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :param parameters: The object for creating or updating a new workspace connection.
+        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)  # typed error body for unmapped status codes
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        **kwargs
+    ) -> "models.WorkspaceConnection":
+        """Get the detail of a workspace connection.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)  # typed error body for unmapped status codes
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
+
+    async def delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        **kwargs
+    ) -> None:
+        """Delete a workspace connection.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"  # service API version pinned at code-generation time
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # 204: already deleted / no content
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)  # typed error body for unmapped status codes
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..eb04045c5e7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations:
+ """WorkspaceFeaturesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListAmlUserFeatureResult"]:
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..157fb3d8db2
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
@@ -0,0 +1,674 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations:
+ """WorkspacesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.Workspace":
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> Optional["models.Workspace"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> AsyncLROPoller["models.Workspace"]:
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.WorkspaceUpdateParameters",
+ **kwargs
+ ) -> "models.Workspace":
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def list_by_resource_group(
+ self,
+ resource_group_name: str,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListWorkspaceKeysResult":
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
+
+ async def resync_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.resync_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def list_by_subscription(
+ self,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
new file mode 100644
index 00000000000..8d5d8030e82
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
@@ -0,0 +1,1095 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import AccountKeySection
+ from ._models_py3 import AciServiceCreateRequest
+ from ._models_py3 import AciServiceCreateRequestDataCollection
+ from ._models_py3 import AciServiceCreateRequestEncryptionProperties
+ from ._models_py3 import AciServiceCreateRequestVnetConfiguration
+ from ._models_py3 import AciServiceResponse
+ from ._models_py3 import AciServiceResponseDataCollection
+ from ._models_py3 import AciServiceResponseEncryptionProperties
+ from ._models_py3 import AciServiceResponseEnvironmentImageRequest
+ from ._models_py3 import AciServiceResponseVnetConfiguration
+ from ._models_py3 import Aks
+ from ._models_py3 import AksComputeConfiguration
+ from ._models_py3 import AksComputeSecrets
+ from ._models_py3 import AksDeploymentConfiguration
+ from ._models_py3 import AksNetworkingConfiguration
+ from ._models_py3 import AksProperties
+ from ._models_py3 import AksReplicaStatus
+ from ._models_py3 import AksReplicaStatusError
+ from ._models_py3 import AksServiceCreateRequest
+ from ._models_py3 import AksServiceCreateRequestAutoScaler
+ from ._models_py3 import AksServiceCreateRequestDataCollection
+ from ._models_py3 import AksServiceCreateRequestLivenessProbeRequirements
+ from ._models_py3 import AksServiceResponse
+ from ._models_py3 import AksServiceResponseAutoScaler
+ from ._models_py3 import AksServiceResponseDataCollection
+ from ._models_py3 import AksServiceResponseDeploymentStatus
+ from ._models_py3 import AksServiceResponseEnvironmentImageRequest
+ from ._models_py3 import AksServiceResponseLivenessProbeRequirements
+ from ._models_py3 import AksVariantResponse
+ from ._models_py3 import AmlCompute
+ from ._models_py3 import AmlComputeNodeInformation
+ from ._models_py3 import AmlComputeNodesInformation
+ from ._models_py3 import AmlComputeProperties
+ from ._models_py3 import AmlTokenConfiguration
+ from ._models_py3 import AmlUserFeature
+ from ._models_py3 import AssetPath
+ from ._models_py3 import AssetReferenceBase
+ from ._models_py3 import AssignedUser
+ from ._models_py3 import AuthKeys
+ from ._models_py3 import AutoMlJob
+ from ._models_py3 import AutoScaler
+ from ._models_py3 import AzureDataLakeSection
+ from ._models_py3 import AzureMlComputeConfiguration
+ from ._models_py3 import AzureMySqlSection
+ from ._models_py3 import AzurePostgreSqlSection
+ from ._models_py3 import AzureSqlDatabaseSection
+ from ._models_py3 import AzureStorageSection
+ from ._models_py3 import BanditPolicyConfiguration
+ from ._models_py3 import CertificateSection
+ from ._models_py3 import ClusterUpdateParameters
+ from ._models_py3 import CocoExportSummary
+ from ._models_py3 import CodeConfiguration
+ from ._models_py3 import CodeContainerResource
+ from ._models_py3 import CodeContainerResourceArmPaginatedResult
+ from ._models_py3 import CodeVersionResource
+ from ._models_py3 import CodeVersionResourceArmPaginatedResult
+ from ._models_py3 import CommandJob
+ from ._models_py3 import Component
+ from ._models_py3 import ComponentContainerResource
+ from ._models_py3 import ComponentContainerResourceArmPaginatedResult
+ from ._models_py3 import ComponentInput
+ from ._models_py3 import ComponentInputEnum
+ from ._models_py3 import ComponentInputGeneric
+ from ._models_py3 import ComponentInputRangedNumber
+ from ._models_py3 import ComponentJob
+ from ._models_py3 import ComponentJobInput
+ from ._models_py3 import ComponentJobOutput
+ from ._models_py3 import ComponentOutput
+ from ._models_py3 import ComponentVersionResource
+ from ._models_py3 import ComponentVersionResourceArmPaginatedResult
+ from ._models_py3 import Compute
+ from ._models_py3 import ComputeBinding
+ from ._models_py3 import ComputeConfiguration
+ from ._models_py3 import ComputeInstance
+ from ._models_py3 import ComputeInstanceApplication
+ from ._models_py3 import ComputeInstanceConnectivityEndpoints
+ from ._models_py3 import ComputeInstanceCreatedBy
+ from ._models_py3 import ComputeInstanceLastOperation
+ from ._models_py3 import ComputeInstanceProperties
+ from ._models_py3 import ComputeInstanceSshSettings
+ from ._models_py3 import ComputeJobBase
+ from ._models_py3 import ComputeNodesInformation
+ from ._models_py3 import ComputeResource
+ from ._models_py3 import ComputeSecrets
+ from ._models_py3 import ContainerRegistry
+ from ._models_py3 import ContainerRegistryResponse
+ from ._models_py3 import ContainerResourceRequirements
+ from ._models_py3 import CreateEndpointVariantRequest
+ from ._models_py3 import CreateServiceRequest
+ from ._models_py3 import CreateServiceRequestEnvironmentImageRequest
+ from ._models_py3 import CreateServiceRequestKeys
+ from ._models_py3 import CsvExportSummary
+ from ._models_py3 import DataBinding
+ from ._models_py3 import DataContainerResource
+ from ._models_py3 import DataContainerResourceArmPaginatedResult
+ from ._models_py3 import DataFactory
+ from ._models_py3 import DataLakeAnalytics
+ from ._models_py3 import DataLakeAnalyticsProperties
+ from ._models_py3 import DataPathAssetReference
+ from ._models_py3 import DataSettings
+ from ._models_py3 import DataVersionResource
+ from ._models_py3 import DataVersionResourceArmPaginatedResult
+ from ._models_py3 import Databricks
+ from ._models_py3 import DatabricksComputeSecrets
+ from ._models_py3 import DatabricksProperties
+ from ._models_py3 import DatasetExportSummary
+ from ._models_py3 import DatasetReference
+ from ._models_py3 import DatastoreContents
+ from ._models_py3 import DatastoreCredentials
+ from ._models_py3 import DatastorePropertiesResource
+ from ._models_py3 import DatastorePropertiesResourceArmPaginatedResult
+ from ._models_py3 import DeploymentConfigurationBase
+ from ._models_py3 import DeploymentLogs
+ from ._models_py3 import DeploymentLogsRequest
+ from ._models_py3 import DistributionConfiguration
+ from ._models_py3 import DockerBuild
+ from ._models_py3 import DockerImage
+ from ._models_py3 import DockerImagePlatform
+ from ._models_py3 import DockerSpecification
+ from ._models_py3 import EarlyTerminationPolicyConfiguration
+ from ._models_py3 import EncryptionProperties
+ from ._models_py3 import EncryptionProperty
+ from ._models_py3 import EndpointAuthKeys
+ from ._models_py3 import EndpointAuthToken
+ from ._models_py3 import EnvironmentContainerResource
+ from ._models_py3 import EnvironmentContainerResourceArmPaginatedResult
+ from ._models_py3 import EnvironmentImageRequest
+ from ._models_py3 import EnvironmentImageRequestEnvironment
+ from ._models_py3 import EnvironmentImageRequestEnvironmentReference
+ from ._models_py3 import EnvironmentImageResponse
+ from ._models_py3 import EnvironmentImageResponseEnvironment
+ from ._models_py3 import EnvironmentImageResponseEnvironmentReference
+ from ._models_py3 import EnvironmentReference
+ from ._models_py3 import EnvironmentSpecificationVersionResource
+ from ._models_py3 import EnvironmentSpecificationVersionResourceArmPaginatedResult
+ from ._models_py3 import ErrorDetail
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import EstimatedVmPrice
+ from ._models_py3 import EstimatedVmPrices
+ from ._models_py3 import EvaluationConfiguration
+ from ._models_py3 import ExperimentLimits
+ from ._models_py3 import ExportSummary
+ from ._models_py3 import FeaturizationSettings
+ from ._models_py3 import ForecastingSettings
+ from ._models_py3 import GeneralSettings
+ from ._models_py3 import GlusterFsSection
+ from ._models_py3 import HdInsight
+ from ._models_py3 import HdInsightProperties
+ from ._models_py3 import IdAssetReference
+ from ._models_py3 import Identity
+ from ._models_py3 import IdentityConfiguration
+ from ._models_py3 import ImageAsset
+ from ._models_py3 import InferenceContainerProperties
+ from ._models_py3 import InputData
+ from ._models_py3 import JobBase
+ from ._models_py3 import JobBaseInteractionEndpoints
+ from ._models_py3 import JobBaseResource
+ from ._models_py3 import JobBaseResourceArmPaginatedResult
+ from ._models_py3 import JobOutput
+ from ._models_py3 import KeyVaultProperties
+ from ._models_py3 import LabelCategory
+ from ._models_py3 import LabelClass
+ from ._models_py3 import LabelingDatasetConfiguration
+ from ._models_py3 import LabelingJob
+ from ._models_py3 import LabelingJobImageProperties
+ from ._models_py3 import LabelingJobInstructions
+ from ._models_py3 import LabelingJobMediaProperties
+ from ._models_py3 import LabelingJobResource
+ from ._models_py3 import LabelingJobResourceArmPaginatedResult
+ from ._models_py3 import LabelingJobTextProperties
+ from ._models_py3 import LinkedInfo
+ from ._models_py3 import LinkedServiceList
+ from ._models_py3 import LinkedServiceProps
+ from ._models_py3 import LinkedServiceRequest
+ from ._models_py3 import LinkedServiceResponse
+ from ._models_py3 import ListAmlUserFeatureResult
+ from ._models_py3 import ListNotebookKeysResult
+ from ._models_py3 import ListUsagesResult
+ from ._models_py3 import ListWorkspaceKeysResult
+ from ._models_py3 import ListWorkspaceQuotas
+ from ._models_py3 import LivenessProbeRequirements
+ from ._models_py3 import MachineLearningServiceError
+ from ._models_py3 import ManagedComputeConfiguration
+ from ._models_py3 import ManagedDeploymentConfiguration
+ from ._models_py3 import ManagedIdentityConfiguration
+ from ._models_py3 import MedianStoppingPolicyConfiguration
+ from ._models_py3 import MlAssistConfiguration
+ from ._models_py3 import Model
+ from ._models_py3 import ModelContainerResource
+ from ._models_py3 import ModelContainerResourceArmPaginatedResult
+ from ._models_py3 import ModelDataCollection
+ from ._models_py3 import ModelDockerSection
+ from ._models_py3 import ModelDockerSectionBaseImageRegistry
+ from ._models_py3 import ModelDockerSectionResponse
+ from ._models_py3 import ModelDockerSectionResponseBaseImageRegistry
+ from ._models_py3 import ModelEnvironmentDefinition
+ from ._models_py3 import ModelEnvironmentDefinitionDocker
+ from ._models_py3 import ModelEnvironmentDefinitionPython
+ from ._models_py3 import ModelEnvironmentDefinitionR
+ from ._models_py3 import ModelEnvironmentDefinitionResponse
+ from ._models_py3 import ModelEnvironmentDefinitionResponseDocker
+ from ._models_py3 import ModelEnvironmentDefinitionResponsePython
+ from ._models_py3 import ModelEnvironmentDefinitionResponseR
+ from ._models_py3 import ModelEnvironmentDefinitionResponseSpark
+ from ._models_py3 import ModelEnvironmentDefinitionSpark
+ from ._models_py3 import ModelPythonSection
+ from ._models_py3 import ModelSparkSection
+ from ._models_py3 import ModelVersionResource
+ from ._models_py3 import ModelVersionResourceArmPaginatedResult
+ from ._models_py3 import Mpi
+ from ._models_py3 import NodeStateCounts
+ from ._models_py3 import NotebookListCredentialsResult
+ from ._models_py3 import NotebookPreparationError
+ from ._models_py3 import NotebookResourceInfo
+ from ._models_py3 import OnlineDeploymentScaleSettings
+ from ._models_py3 import OnlineDeploymentTrackedResource
+ from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult
+ from ._models_py3 import OnlineEndpointTrackedResource
+ from ._models_py3 import OnlineEndpointTrackedResourceArmPaginatedResult
+ from ._models_py3 import Operation
+ from ._models_py3 import OperationDisplay
+ from ._models_py3 import OperationListResult
+ from ._models_py3 import OutputData
+ from ._models_py3 import OutputPathAssetReference
+ from ._models_py3 import PaginatedComputeResourcesList
+ from ._models_py3 import PaginatedServiceList
+ from ._models_py3 import PaginatedWorkspaceConnectionsList
+ from ._models_py3 import ParameterSamplingConfiguration
+ from ._models_py3 import PartialOnlineDeployment
+ from ._models_py3 import PartialOnlineDeploymentPartialTrackedResource
+ from ._models_py3 import PartialOnlineEndpoint
+ from ._models_py3 import PartialOnlineEndpointPartialTrackedResource
+ from ._models_py3 import Password
+ from ._models_py3 import PersonalComputeInstanceSettings
+ from ._models_py3 import Pipeline
+ from ._models_py3 import PipelineInput
+ from ._models_py3 import PipelineJob
+ from ._models_py3 import PipelineOutput
+ from ._models_py3 import PrivateEndpoint
+ from ._models_py3 import PrivateEndpointConnection
+ from ._models_py3 import PrivateLinkResource
+ from ._models_py3 import PrivateLinkResourceListResult
+ from ._models_py3 import PrivateLinkServiceConnectionState
+ from ._models_py3 import ProgressMetrics
+ from ._models_py3 import PyTorch
+ from ._models_py3 import QuotaBaseProperties
+ from ._models_py3 import QuotaUpdateParameters
+ from ._models_py3 import RCranPackage
+ from ._models_py3 import RGitHubPackage
+ from ._models_py3 import RGitHubPackageResponse
+ from ._models_py3 import RSection
+ from ._models_py3 import RSectionResponse
+ from ._models_py3 import RegenerateEndpointKeysRequest
+ from ._models_py3 import RegistryListCredentialsResult
+ from ._models_py3 import Resource
+ from ._models_py3 import ResourceId
+ from ._models_py3 import ResourceIdentity
+ from ._models_py3 import ResourceName
+ from ._models_py3 import ResourceQuota
+ from ._models_py3 import ResourceSkuLocationInfo
+ from ._models_py3 import ResourceSkuZoneDetails
+ from ._models_py3 import Restriction
+ from ._models_py3 import Route
+ from ._models_py3 import SasSection
+ from ._models_py3 import ScaleSettings
+ from ._models_py3 import ScriptReference
+ from ._models_py3 import ScriptsToExecute
+ from ._models_py3 import ServicePrincipalConfiguration
+ from ._models_py3 import ServicePrincipalCredentials
+ from ._models_py3 import ServicePrincipalSection
+ from ._models_py3 import ServiceResource
+ from ._models_py3 import ServiceResponseBase
+ from ._models_py3 import ServiceResponseBaseError
+ from ._models_py3 import SetupScripts
+ from ._models_py3 import SharedPrivateLinkResource
+ from ._models_py3 import Sku
+ from ._models_py3 import SkuCapability
+ from ._models_py3 import SkuListResult
+ from ._models_py3 import SparkMavenPackage
+ from ._models_py3 import SqlAdminSection
+ from ._models_py3 import SslConfiguration
+ from ._models_py3 import StatusMessage
+ from ._models_py3 import SweepJob
+ from ._models_py3 import SystemData
+ from ._models_py3 import SystemService
+ from ._models_py3 import TensorFlow
+ from ._models_py3 import TerminationConfiguration
+ from ._models_py3 import TrainingDataSettings
+ from ._models_py3 import TrainingSettings
+ from ._models_py3 import TrialComponent
+ from ._models_py3 import TruncationSelectionPolicyConfiguration
+ from ._models_py3 import UpdateWorkspaceQuotas
+ from ._models_py3 import UpdateWorkspaceQuotasResult
+ from ._models_py3 import Usage
+ from ._models_py3 import UsageName
+ from ._models_py3 import UserAccountCredentials
+ from ._models_py3 import UserAssignedIdentity
+ from ._models_py3 import UserAssignedIdentityMeta
+ from ._models_py3 import ValidationDataSettings
+ from ._models_py3 import VirtualMachine
+ from ._models_py3 import VirtualMachineImage
+ from ._models_py3 import VirtualMachineProperties
+ from ._models_py3 import VirtualMachineSecrets
+ from ._models_py3 import VirtualMachineSize
+ from ._models_py3 import VirtualMachineSizeListResult
+ from ._models_py3 import VirtualMachineSshCredentials
+ from ._models_py3 import VnetConfiguration
+ from ._models_py3 import Workspace
+ from ._models_py3 import WorkspaceConnection
+ from ._models_py3 import WorkspaceConnectionDto
+ from ._models_py3 import WorkspaceListResult
+ from ._models_py3 import WorkspaceSku
+ from ._models_py3 import WorkspaceUpdateParameters
+except (SyntaxError, ImportError):
+ from ._models import AccountKeySection # type: ignore
+ from ._models import AciServiceCreateRequest # type: ignore
+ from ._models import AciServiceCreateRequestDataCollection # type: ignore
+ from ._models import AciServiceCreateRequestEncryptionProperties # type: ignore
+ from ._models import AciServiceCreateRequestVnetConfiguration # type: ignore
+ from ._models import AciServiceResponse # type: ignore
+ from ._models import AciServiceResponseDataCollection # type: ignore
+ from ._models import AciServiceResponseEncryptionProperties # type: ignore
+ from ._models import AciServiceResponseEnvironmentImageRequest # type: ignore
+ from ._models import AciServiceResponseVnetConfiguration # type: ignore
+ from ._models import Aks # type: ignore
+ from ._models import AksComputeConfiguration # type: ignore
+ from ._models import AksComputeSecrets # type: ignore
+ from ._models import AksDeploymentConfiguration # type: ignore
+ from ._models import AksNetworkingConfiguration # type: ignore
+ from ._models import AksProperties # type: ignore
+ from ._models import AksReplicaStatus # type: ignore
+ from ._models import AksReplicaStatusError # type: ignore
+ from ._models import AksServiceCreateRequest # type: ignore
+ from ._models import AksServiceCreateRequestAutoScaler # type: ignore
+ from ._models import AksServiceCreateRequestDataCollection # type: ignore
+ from ._models import AksServiceCreateRequestLivenessProbeRequirements # type: ignore
+ from ._models import AksServiceResponse # type: ignore
+ from ._models import AksServiceResponseAutoScaler # type: ignore
+ from ._models import AksServiceResponseDataCollection # type: ignore
+ from ._models import AksServiceResponseDeploymentStatus # type: ignore
+ from ._models import AksServiceResponseEnvironmentImageRequest # type: ignore
+ from ._models import AksServiceResponseLivenessProbeRequirements # type: ignore
+ from ._models import AksVariantResponse # type: ignore
+ from ._models import AmlCompute # type: ignore
+ from ._models import AmlComputeNodeInformation # type: ignore
+ from ._models import AmlComputeNodesInformation # type: ignore
+ from ._models import AmlComputeProperties # type: ignore
+ from ._models import AmlTokenConfiguration # type: ignore
+ from ._models import AmlUserFeature # type: ignore
+ from ._models import AssetPath # type: ignore
+ from ._models import AssetReferenceBase # type: ignore
+ from ._models import AssignedUser # type: ignore
+ from ._models import AuthKeys # type: ignore
+ from ._models import AutoMlJob # type: ignore
+ from ._models import AutoScaler # type: ignore
+ from ._models import AzureDataLakeSection # type: ignore
+ from ._models import AzureMlComputeConfiguration # type: ignore
+ from ._models import AzureMySqlSection # type: ignore
+ from ._models import AzurePostgreSqlSection # type: ignore
+ from ._models import AzureSqlDatabaseSection # type: ignore
+ from ._models import AzureStorageSection # type: ignore
+ from ._models import BanditPolicyConfiguration # type: ignore
+ from ._models import CertificateSection # type: ignore
+ from ._models import ClusterUpdateParameters # type: ignore
+ from ._models import CocoExportSummary # type: ignore
+ from ._models import CodeConfiguration # type: ignore
+ from ._models import CodeContainerResource # type: ignore
+ from ._models import CodeContainerResourceArmPaginatedResult # type: ignore
+ from ._models import CodeVersionResource # type: ignore
+ from ._models import CodeVersionResourceArmPaginatedResult # type: ignore
+ from ._models import CommandJob # type: ignore
+ from ._models import Component # type: ignore
+ from ._models import ComponentContainerResource # type: ignore
+ from ._models import ComponentContainerResourceArmPaginatedResult # type: ignore
+ from ._models import ComponentInput # type: ignore
+ from ._models import ComponentInputEnum # type: ignore
+ from ._models import ComponentInputGeneric # type: ignore
+ from ._models import ComponentInputRangedNumber # type: ignore
+ from ._models import ComponentJob # type: ignore
+ from ._models import ComponentJobInput # type: ignore
+ from ._models import ComponentJobOutput # type: ignore
+ from ._models import ComponentOutput # type: ignore
+ from ._models import ComponentVersionResource # type: ignore
+ from ._models import ComponentVersionResourceArmPaginatedResult # type: ignore
+ from ._models import Compute # type: ignore
+ from ._models import ComputeBinding # type: ignore
+ from ._models import ComputeConfiguration # type: ignore
+ from ._models import ComputeInstance # type: ignore
+ from ._models import ComputeInstanceApplication # type: ignore
+ from ._models import ComputeInstanceConnectivityEndpoints # type: ignore
+ from ._models import ComputeInstanceCreatedBy # type: ignore
+ from ._models import ComputeInstanceLastOperation # type: ignore
+ from ._models import ComputeInstanceProperties # type: ignore
+ from ._models import ComputeInstanceSshSettings # type: ignore
+ from ._models import ComputeJobBase # type: ignore
+ from ._models import ComputeNodesInformation # type: ignore
+ from ._models import ComputeResource # type: ignore
+ from ._models import ComputeSecrets # type: ignore
+ from ._models import ContainerRegistry # type: ignore
+ from ._models import ContainerRegistryResponse # type: ignore
+ from ._models import ContainerResourceRequirements # type: ignore
+ from ._models import CreateEndpointVariantRequest # type: ignore
+ from ._models import CreateServiceRequest # type: ignore
+ from ._models import CreateServiceRequestEnvironmentImageRequest # type: ignore
+ from ._models import CreateServiceRequestKeys # type: ignore
+ from ._models import CsvExportSummary # type: ignore
+ from ._models import DataBinding # type: ignore
+ from ._models import DataContainerResource # type: ignore
+ from ._models import DataContainerResourceArmPaginatedResult # type: ignore
+ from ._models import DataFactory # type: ignore
+ from ._models import DataLakeAnalytics # type: ignore
+ from ._models import DataLakeAnalyticsProperties # type: ignore
+ from ._models import DataPathAssetReference # type: ignore
+ from ._models import DataSettings # type: ignore
+ from ._models import DataVersionResource # type: ignore
+ from ._models import DataVersionResourceArmPaginatedResult # type: ignore
+ from ._models import Databricks # type: ignore
+ from ._models import DatabricksComputeSecrets # type: ignore
+ from ._models import DatabricksProperties # type: ignore
+ from ._models import DatasetExportSummary # type: ignore
+ from ._models import DatasetReference # type: ignore
+ from ._models import DatastoreContents # type: ignore
+ from ._models import DatastoreCredentials # type: ignore
+ from ._models import DatastorePropertiesResource # type: ignore
+ from ._models import DatastorePropertiesResourceArmPaginatedResult # type: ignore
+ from ._models import DeploymentConfigurationBase # type: ignore
+ from ._models import DeploymentLogs # type: ignore
+ from ._models import DeploymentLogsRequest # type: ignore
+ from ._models import DistributionConfiguration # type: ignore
+ from ._models import DockerBuild # type: ignore
+ from ._models import DockerImage # type: ignore
+ from ._models import DockerImagePlatform # type: ignore
+ from ._models import DockerSpecification # type: ignore
+ from ._models import EarlyTerminationPolicyConfiguration # type: ignore
+ from ._models import EncryptionProperties # type: ignore
+ from ._models import EncryptionProperty # type: ignore
+ from ._models import EndpointAuthKeys # type: ignore
+ from ._models import EndpointAuthToken # type: ignore
+ from ._models import EnvironmentContainerResource # type: ignore
+ from ._models import EnvironmentContainerResourceArmPaginatedResult # type: ignore
+ from ._models import EnvironmentImageRequest # type: ignore
+ from ._models import EnvironmentImageRequestEnvironment # type: ignore
+ from ._models import EnvironmentImageRequestEnvironmentReference # type: ignore
+ from ._models import EnvironmentImageResponse # type: ignore
+ from ._models import EnvironmentImageResponseEnvironment # type: ignore
+ from ._models import EnvironmentImageResponseEnvironmentReference # type: ignore
+ from ._models import EnvironmentReference # type: ignore
+ from ._models import EnvironmentSpecificationVersionResource # type: ignore
+ from ._models import EnvironmentSpecificationVersionResourceArmPaginatedResult # type: ignore
+ from ._models import ErrorDetail # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import EstimatedVmPrice # type: ignore
+ from ._models import EstimatedVmPrices # type: ignore
+ from ._models import EvaluationConfiguration # type: ignore
+ from ._models import ExperimentLimits # type: ignore
+ from ._models import ExportSummary # type: ignore
+ from ._models import FeaturizationSettings # type: ignore
+ from ._models import ForecastingSettings # type: ignore
+ from ._models import GeneralSettings # type: ignore
+ from ._models import GlusterFsSection # type: ignore
+ from ._models import HdInsight # type: ignore
+ from ._models import HdInsightProperties # type: ignore
+ from ._models import IdAssetReference # type: ignore
+ from ._models import Identity # type: ignore
+ from ._models import IdentityConfiguration # type: ignore
+ from ._models import ImageAsset # type: ignore
+ from ._models import InferenceContainerProperties # type: ignore
+ from ._models import InputData # type: ignore
+ from ._models import JobBase # type: ignore
+ from ._models import JobBaseInteractionEndpoints # type: ignore
+ from ._models import JobBaseResource # type: ignore
+ from ._models import JobBaseResourceArmPaginatedResult # type: ignore
+ from ._models import JobOutput # type: ignore
+ from ._models import KeyVaultProperties # type: ignore
+ from ._models import LabelCategory # type: ignore
+ from ._models import LabelClass # type: ignore
+ from ._models import LabelingDatasetConfiguration # type: ignore
+ from ._models import LabelingJob # type: ignore
+ from ._models import LabelingJobImageProperties # type: ignore
+ from ._models import LabelingJobInstructions # type: ignore
+ from ._models import LabelingJobMediaProperties # type: ignore
+ from ._models import LabelingJobResource # type: ignore
+ from ._models import LabelingJobResourceArmPaginatedResult # type: ignore
+ from ._models import LabelingJobTextProperties # type: ignore
+ from ._models import LinkedInfo # type: ignore
+ from ._models import LinkedServiceList # type: ignore
+ from ._models import LinkedServiceProps # type: ignore
+ from ._models import LinkedServiceRequest # type: ignore
+ from ._models import LinkedServiceResponse # type: ignore
+ from ._models import ListAmlUserFeatureResult # type: ignore
+ from ._models import ListNotebookKeysResult # type: ignore
+ from ._models import ListUsagesResult # type: ignore
+ from ._models import ListWorkspaceKeysResult # type: ignore
+ from ._models import ListWorkspaceQuotas # type: ignore
+ from ._models import LivenessProbeRequirements # type: ignore
+ from ._models import MachineLearningServiceError # type: ignore
+ from ._models import ManagedComputeConfiguration # type: ignore
+ from ._models import ManagedDeploymentConfiguration # type: ignore
+ from ._models import ManagedIdentityConfiguration # type: ignore
+ from ._models import MedianStoppingPolicyConfiguration # type: ignore
+ from ._models import MlAssistConfiguration # type: ignore
+ from ._models import Model # type: ignore
+ from ._models import ModelContainerResource # type: ignore
+ from ._models import ModelContainerResourceArmPaginatedResult # type: ignore
+ from ._models import ModelDataCollection # type: ignore
+ from ._models import ModelDockerSection # type: ignore
+ from ._models import ModelDockerSectionBaseImageRegistry # type: ignore
+ from ._models import ModelDockerSectionResponse # type: ignore
+ from ._models import ModelDockerSectionResponseBaseImageRegistry # type: ignore
+ from ._models import ModelEnvironmentDefinition # type: ignore
+ from ._models import ModelEnvironmentDefinitionDocker # type: ignore
+ from ._models import ModelEnvironmentDefinitionPython # type: ignore
+ from ._models import ModelEnvironmentDefinitionR # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponse # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseDocker # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponsePython # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseR # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseSpark # type: ignore
+ from ._models import ModelEnvironmentDefinitionSpark # type: ignore
+ from ._models import ModelPythonSection # type: ignore
+ from ._models import ModelSparkSection # type: ignore
+ from ._models import ModelVersionResource # type: ignore
+ from ._models import ModelVersionResourceArmPaginatedResult # type: ignore
+ from ._models import Mpi # type: ignore
+ from ._models import NodeStateCounts # type: ignore
+ from ._models import NotebookListCredentialsResult # type: ignore
+ from ._models import NotebookPreparationError # type: ignore
+ from ._models import NotebookResourceInfo # type: ignore
+ from ._models import OnlineDeploymentScaleSettings # type: ignore
+ from ._models import OnlineDeploymentTrackedResource # type: ignore
+ from ._models import OnlineDeploymentTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import OnlineEndpointTrackedResource # type: ignore
+ from ._models import OnlineEndpointTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import Operation # type: ignore
+ from ._models import OperationDisplay # type: ignore
+ from ._models import OperationListResult # type: ignore
+ from ._models import OutputData # type: ignore
+ from ._models import OutputPathAssetReference # type: ignore
+ from ._models import PaginatedComputeResourcesList # type: ignore
+ from ._models import PaginatedServiceList # type: ignore
+ from ._models import PaginatedWorkspaceConnectionsList # type: ignore
+ from ._models import ParameterSamplingConfiguration # type: ignore
+ from ._models import PartialOnlineDeployment # type: ignore
+ from ._models import PartialOnlineDeploymentPartialTrackedResource # type: ignore
+ from ._models import PartialOnlineEndpoint # type: ignore
+ from ._models import PartialOnlineEndpointPartialTrackedResource # type: ignore
+ from ._models import Password # type: ignore
+ from ._models import PersonalComputeInstanceSettings # type: ignore
+ from ._models import Pipeline # type: ignore
+ from ._models import PipelineInput # type: ignore
+ from ._models import PipelineJob # type: ignore
+ from ._models import PipelineOutput # type: ignore
+ from ._models import PrivateEndpoint # type: ignore
+ from ._models import PrivateEndpointConnection # type: ignore
+ from ._models import PrivateLinkResource # type: ignore
+ from ._models import PrivateLinkResourceListResult # type: ignore
+ from ._models import PrivateLinkServiceConnectionState # type: ignore
+ from ._models import ProgressMetrics # type: ignore
+ from ._models import PyTorch # type: ignore
+ from ._models import QuotaBaseProperties # type: ignore
+ from ._models import QuotaUpdateParameters # type: ignore
+ from ._models import RCranPackage # type: ignore
+ from ._models import RGitHubPackage # type: ignore
+ from ._models import RGitHubPackageResponse # type: ignore
+ from ._models import RSection # type: ignore
+ from ._models import RSectionResponse # type: ignore
+ from ._models import RegenerateEndpointKeysRequest # type: ignore
+ from ._models import RegistryListCredentialsResult # type: ignore
+ from ._models import Resource # type: ignore
+ from ._models import ResourceId # type: ignore
+ from ._models import ResourceIdentity # type: ignore
+ from ._models import ResourceName # type: ignore
+ from ._models import ResourceQuota # type: ignore
+ from ._models import ResourceSkuLocationInfo # type: ignore
+ from ._models import ResourceSkuZoneDetails # type: ignore
+ from ._models import Restriction # type: ignore
+ from ._models import Route # type: ignore
+ from ._models import SasSection # type: ignore
+ from ._models import ScaleSettings # type: ignore
+ from ._models import ScriptReference # type: ignore
+ from ._models import ScriptsToExecute # type: ignore
+ from ._models import ServicePrincipalConfiguration # type: ignore
+ from ._models import ServicePrincipalCredentials # type: ignore
+ from ._models import ServicePrincipalSection # type: ignore
+ from ._models import ServiceResource # type: ignore
+ from ._models import ServiceResponseBase # type: ignore
+ from ._models import ServiceResponseBaseError # type: ignore
+ from ._models import SetupScripts # type: ignore
+ from ._models import SharedPrivateLinkResource # type: ignore
+ from ._models import Sku # type: ignore
+ from ._models import SkuCapability # type: ignore
+ from ._models import SkuListResult # type: ignore
+ from ._models import SparkMavenPackage # type: ignore
+ from ._models import SqlAdminSection # type: ignore
+ from ._models import SslConfiguration # type: ignore
+ from ._models import StatusMessage # type: ignore
+ from ._models import SweepJob # type: ignore
+ from ._models import SystemData # type: ignore
+ from ._models import SystemService # type: ignore
+ from ._models import TensorFlow # type: ignore
+ from ._models import TerminationConfiguration # type: ignore
+ from ._models import TrainingDataSettings # type: ignore
+ from ._models import TrainingSettings # type: ignore
+ from ._models import TrialComponent # type: ignore
+ from ._models import TruncationSelectionPolicyConfiguration # type: ignore
+ from ._models import UpdateWorkspaceQuotas # type: ignore
+ from ._models import UpdateWorkspaceQuotasResult # type: ignore
+ from ._models import Usage # type: ignore
+ from ._models import UsageName # type: ignore
+ from ._models import UserAccountCredentials # type: ignore
+ from ._models import UserAssignedIdentity # type: ignore
+ from ._models import UserAssignedIdentityMeta # type: ignore
+ from ._models import ValidationDataSettings # type: ignore
+ from ._models import VirtualMachine # type: ignore
+ from ._models import VirtualMachineImage # type: ignore
+ from ._models import VirtualMachineProperties # type: ignore
+ from ._models import VirtualMachineSecrets # type: ignore
+ from ._models import VirtualMachineSize # type: ignore
+ from ._models import VirtualMachineSizeListResult # type: ignore
+ from ._models import VirtualMachineSshCredentials # type: ignore
+ from ._models import VnetConfiguration # type: ignore
+ from ._models import Workspace # type: ignore
+ from ._models import WorkspaceConnection # type: ignore
+ from ._models import WorkspaceConnectionDto # type: ignore
+ from ._models import WorkspaceListResult # type: ignore
+ from ._models import WorkspaceSku # type: ignore
+ from ._models import WorkspaceUpdateParameters # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+ AllocationState,
+ ApplicationSharingPolicy,
+ AssetGenerator,
+ BillingCurrency,
+ ComponentInputType,
+ ComponentType,
+ ComputeEnvironmentType,
+ ComputeInstanceAuthorizationType,
+ ComputeInstanceState,
+ ComputeType,
+ ContainerType,
+ ContentsType,
+ CreatedByType,
+ CredentialsType,
+ DataBindingMode,
+ DatasetType,
+ DeploymentProvisioningState,
+ DeploymentType,
+ DistributionType,
+ DockerSpecificationType,
+ EarlyTerminationPolicyType,
+ EncryptionStatus,
+ EndpointAuthModeType,
+ EndpointComputeType,
+ EndpointProvisioningState,
+ EnvironmentSpecificationType,
+ ExportFormatType,
+ IdentityType,
+ ImageAnnotationType,
+ JobProvisioningState,
+ JobStatus,
+ JobType,
+ KeyType,
+ MediaType,
+ NodeState,
+ OperatingSystemType,
+ OperationName,
+ OperationStatus,
+ OptimizationMetric,
+ OrderString,
+ OriginType,
+ OsType,
+ OsTypes,
+ ParameterSamplingType,
+ PipelineType,
+ PrimaryMetricGoal,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProvisioningState,
+ QuotaUnit,
+ ReasonCode,
+ ReferenceType,
+ RemoteLoginPortPublicAccess,
+ ResourceIdentityAssignment,
+ ResourceIdentityType,
+ ScaleTypeMode,
+ SshPublicAccess,
+ SslConfigurationStatus,
+ Status,
+ StatusMessageLevel,
+ TaskType,
+ TextAnnotationType,
+ UnderlyingResourceAction,
+ UnitOfMeasure,
+ UsageUnit,
+ VariantType,
+ VmPriceOsType,
+ VmPriority,
+ VmTier,
+ WebServiceState,
+)
+
+__all__ = [
+ 'AccountKeySection',
+ 'AciServiceCreateRequest',
+ 'AciServiceCreateRequestDataCollection',
+ 'AciServiceCreateRequestEncryptionProperties',
+ 'AciServiceCreateRequestVnetConfiguration',
+ 'AciServiceResponse',
+ 'AciServiceResponseDataCollection',
+ 'AciServiceResponseEncryptionProperties',
+ 'AciServiceResponseEnvironmentImageRequest',
+ 'AciServiceResponseVnetConfiguration',
+ 'Aks',
+ 'AksComputeConfiguration',
+ 'AksComputeSecrets',
+ 'AksDeploymentConfiguration',
+ 'AksNetworkingConfiguration',
+ 'AksProperties',
+ 'AksReplicaStatus',
+ 'AksReplicaStatusError',
+ 'AksServiceCreateRequest',
+ 'AksServiceCreateRequestAutoScaler',
+ 'AksServiceCreateRequestDataCollection',
+ 'AksServiceCreateRequestLivenessProbeRequirements',
+ 'AksServiceResponse',
+ 'AksServiceResponseAutoScaler',
+ 'AksServiceResponseDataCollection',
+ 'AksServiceResponseDeploymentStatus',
+ 'AksServiceResponseEnvironmentImageRequest',
+ 'AksServiceResponseLivenessProbeRequirements',
+ 'AksVariantResponse',
+ 'AmlCompute',
+ 'AmlComputeNodeInformation',
+ 'AmlComputeNodesInformation',
+ 'AmlComputeProperties',
+ 'AmlTokenConfiguration',
+ 'AmlUserFeature',
+ 'AssetPath',
+ 'AssetReferenceBase',
+ 'AssignedUser',
+ 'AuthKeys',
+ 'AutoMlJob',
+ 'AutoScaler',
+ 'AzureDataLakeSection',
+ 'AzureMlComputeConfiguration',
+ 'AzureMySqlSection',
+ 'AzurePostgreSqlSection',
+ 'AzureSqlDatabaseSection',
+ 'AzureStorageSection',
+ 'BanditPolicyConfiguration',
+ 'CertificateSection',
+ 'ClusterUpdateParameters',
+ 'CocoExportSummary',
+ 'CodeConfiguration',
+ 'CodeContainerResource',
+ 'CodeContainerResourceArmPaginatedResult',
+ 'CodeVersionResource',
+ 'CodeVersionResourceArmPaginatedResult',
+ 'CommandJob',
+ 'Component',
+ 'ComponentContainerResource',
+ 'ComponentContainerResourceArmPaginatedResult',
+ 'ComponentInput',
+ 'ComponentInputEnum',
+ 'ComponentInputGeneric',
+ 'ComponentInputRangedNumber',
+ 'ComponentJob',
+ 'ComponentJobInput',
+ 'ComponentJobOutput',
+ 'ComponentOutput',
+ 'ComponentVersionResource',
+ 'ComponentVersionResourceArmPaginatedResult',
+ 'Compute',
+ 'ComputeBinding',
+ 'ComputeConfiguration',
+ 'ComputeInstance',
+ 'ComputeInstanceApplication',
+ 'ComputeInstanceConnectivityEndpoints',
+ 'ComputeInstanceCreatedBy',
+ 'ComputeInstanceLastOperation',
+ 'ComputeInstanceProperties',
+ 'ComputeInstanceSshSettings',
+ 'ComputeJobBase',
+ 'ComputeNodesInformation',
+ 'ComputeResource',
+ 'ComputeSecrets',
+ 'ContainerRegistry',
+ 'ContainerRegistryResponse',
+ 'ContainerResourceRequirements',
+ 'CreateEndpointVariantRequest',
+ 'CreateServiceRequest',
+ 'CreateServiceRequestEnvironmentImageRequest',
+ 'CreateServiceRequestKeys',
+ 'CsvExportSummary',
+ 'DataBinding',
+ 'DataContainerResource',
+ 'DataContainerResourceArmPaginatedResult',
+ 'DataFactory',
+ 'DataLakeAnalytics',
+ 'DataLakeAnalyticsProperties',
+ 'DataPathAssetReference',
+ 'DataSettings',
+ 'DataVersionResource',
+ 'DataVersionResourceArmPaginatedResult',
+ 'Databricks',
+ 'DatabricksComputeSecrets',
+ 'DatabricksProperties',
+ 'DatasetExportSummary',
+ 'DatasetReference',
+ 'DatastoreContents',
+ 'DatastoreCredentials',
+ 'DatastorePropertiesResource',
+ 'DatastorePropertiesResourceArmPaginatedResult',
+ 'DeploymentConfigurationBase',
+ 'DeploymentLogs',
+ 'DeploymentLogsRequest',
+ 'DistributionConfiguration',
+ 'DockerBuild',
+ 'DockerImage',
+ 'DockerImagePlatform',
+ 'DockerSpecification',
+ 'EarlyTerminationPolicyConfiguration',
+ 'EncryptionProperties',
+ 'EncryptionProperty',
+ 'EndpointAuthKeys',
+ 'EndpointAuthToken',
+ 'EnvironmentContainerResource',
+ 'EnvironmentContainerResourceArmPaginatedResult',
+ 'EnvironmentImageRequest',
+ 'EnvironmentImageRequestEnvironment',
+ 'EnvironmentImageRequestEnvironmentReference',
+ 'EnvironmentImageResponse',
+ 'EnvironmentImageResponseEnvironment',
+ 'EnvironmentImageResponseEnvironmentReference',
+ 'EnvironmentReference',
+ 'EnvironmentSpecificationVersionResource',
+ 'EnvironmentSpecificationVersionResourceArmPaginatedResult',
+ 'ErrorDetail',
+ 'ErrorResponse',
+ 'EstimatedVmPrice',
+ 'EstimatedVmPrices',
+ 'EvaluationConfiguration',
+ 'ExperimentLimits',
+ 'ExportSummary',
+ 'FeaturizationSettings',
+ 'ForecastingSettings',
+ 'GeneralSettings',
+ 'GlusterFsSection',
+ 'HdInsight',
+ 'HdInsightProperties',
+ 'IdAssetReference',
+ 'Identity',
+ 'IdentityConfiguration',
+ 'ImageAsset',
+ 'InferenceContainerProperties',
+ 'InputData',
+ 'JobBase',
+ 'JobBaseInteractionEndpoints',
+ 'JobBaseResource',
+ 'JobBaseResourceArmPaginatedResult',
+ 'JobOutput',
+ 'KeyVaultProperties',
+ 'LabelCategory',
+ 'LabelClass',
+ 'LabelingDatasetConfiguration',
+ 'LabelingJob',
+ 'LabelingJobImageProperties',
+ 'LabelingJobInstructions',
+ 'LabelingJobMediaProperties',
+ 'LabelingJobResource',
+ 'LabelingJobResourceArmPaginatedResult',
+ 'LabelingJobTextProperties',
+ 'LinkedInfo',
+ 'LinkedServiceList',
+ 'LinkedServiceProps',
+ 'LinkedServiceRequest',
+ 'LinkedServiceResponse',
+ 'ListAmlUserFeatureResult',
+ 'ListNotebookKeysResult',
+ 'ListUsagesResult',
+ 'ListWorkspaceKeysResult',
+ 'ListWorkspaceQuotas',
+ 'LivenessProbeRequirements',
+ 'MachineLearningServiceError',
+ 'ManagedComputeConfiguration',
+ 'ManagedDeploymentConfiguration',
+ 'ManagedIdentityConfiguration',
+ 'MedianStoppingPolicyConfiguration',
+ 'MlAssistConfiguration',
+ 'Model',
+ 'ModelContainerResource',
+ 'ModelContainerResourceArmPaginatedResult',
+ 'ModelDataCollection',
+ 'ModelDockerSection',
+ 'ModelDockerSectionBaseImageRegistry',
+ 'ModelDockerSectionResponse',
+ 'ModelDockerSectionResponseBaseImageRegistry',
+ 'ModelEnvironmentDefinition',
+ 'ModelEnvironmentDefinitionDocker',
+ 'ModelEnvironmentDefinitionPython',
+ 'ModelEnvironmentDefinitionR',
+ 'ModelEnvironmentDefinitionResponse',
+ 'ModelEnvironmentDefinitionResponseDocker',
+ 'ModelEnvironmentDefinitionResponsePython',
+ 'ModelEnvironmentDefinitionResponseR',
+ 'ModelEnvironmentDefinitionResponseSpark',
+ 'ModelEnvironmentDefinitionSpark',
+ 'ModelPythonSection',
+ 'ModelSparkSection',
+ 'ModelVersionResource',
+ 'ModelVersionResourceArmPaginatedResult',
+ 'Mpi',
+ 'NodeStateCounts',
+ 'NotebookListCredentialsResult',
+ 'NotebookPreparationError',
+ 'NotebookResourceInfo',
+ 'OnlineDeploymentScaleSettings',
+ 'OnlineDeploymentTrackedResource',
+ 'OnlineDeploymentTrackedResourceArmPaginatedResult',
+ 'OnlineEndpointTrackedResource',
+ 'OnlineEndpointTrackedResourceArmPaginatedResult',
+ 'Operation',
+ 'OperationDisplay',
+ 'OperationListResult',
+ 'OutputData',
+ 'OutputPathAssetReference',
+ 'PaginatedComputeResourcesList',
+ 'PaginatedServiceList',
+ 'PaginatedWorkspaceConnectionsList',
+ 'ParameterSamplingConfiguration',
+ 'PartialOnlineDeployment',
+ 'PartialOnlineDeploymentPartialTrackedResource',
+ 'PartialOnlineEndpoint',
+ 'PartialOnlineEndpointPartialTrackedResource',
+ 'Password',
+ 'PersonalComputeInstanceSettings',
+ 'Pipeline',
+ 'PipelineInput',
+ 'PipelineJob',
+ 'PipelineOutput',
+ 'PrivateEndpoint',
+ 'PrivateEndpointConnection',
+ 'PrivateLinkResource',
+ 'PrivateLinkResourceListResult',
+ 'PrivateLinkServiceConnectionState',
+ 'ProgressMetrics',
+ 'PyTorch',
+ 'QuotaBaseProperties',
+ 'QuotaUpdateParameters',
+ 'RCranPackage',
+ 'RGitHubPackage',
+ 'RGitHubPackageResponse',
+ 'RSection',
+ 'RSectionResponse',
+ 'RegenerateEndpointKeysRequest',
+ 'RegistryListCredentialsResult',
+ 'Resource',
+ 'ResourceId',
+ 'ResourceIdentity',
+ 'ResourceName',
+ 'ResourceQuota',
+ 'ResourceSkuLocationInfo',
+ 'ResourceSkuZoneDetails',
+ 'Restriction',
+ 'Route',
+ 'SasSection',
+ 'ScaleSettings',
+ 'ScriptReference',
+ 'ScriptsToExecute',
+ 'ServicePrincipalConfiguration',
+ 'ServicePrincipalCredentials',
+ 'ServicePrincipalSection',
+ 'ServiceResource',
+ 'ServiceResponseBase',
+ 'ServiceResponseBaseError',
+ 'SetupScripts',
+ 'SharedPrivateLinkResource',
+ 'Sku',
+ 'SkuCapability',
+ 'SkuListResult',
+ 'SparkMavenPackage',
+ 'SqlAdminSection',
+ 'SslConfiguration',
+ 'StatusMessage',
+ 'SweepJob',
+ 'SystemData',
+ 'SystemService',
+ 'TensorFlow',
+ 'TerminationConfiguration',
+ 'TrainingDataSettings',
+ 'TrainingSettings',
+ 'TrialComponent',
+ 'TruncationSelectionPolicyConfiguration',
+ 'UpdateWorkspaceQuotas',
+ 'UpdateWorkspaceQuotasResult',
+ 'Usage',
+ 'UsageName',
+ 'UserAccountCredentials',
+ 'UserAssignedIdentity',
+ 'UserAssignedIdentityMeta',
+ 'ValidationDataSettings',
+ 'VirtualMachine',
+ 'VirtualMachineImage',
+ 'VirtualMachineProperties',
+ 'VirtualMachineSecrets',
+ 'VirtualMachineSize',
+ 'VirtualMachineSizeListResult',
+ 'VirtualMachineSshCredentials',
+ 'VnetConfiguration',
+ 'Workspace',
+ 'WorkspaceConnection',
+ 'WorkspaceConnectionDto',
+ 'WorkspaceListResult',
+ 'WorkspaceSku',
+ 'WorkspaceUpdateParameters',
+ 'AllocationState',
+ 'ApplicationSharingPolicy',
+ 'AssetGenerator',
+ 'BillingCurrency',
+ 'ComponentInputType',
+ 'ComponentType',
+ 'ComputeEnvironmentType',
+ 'ComputeInstanceAuthorizationType',
+ 'ComputeInstanceState',
+ 'ComputeType',
+ 'ContainerType',
+ 'ContentsType',
+ 'CreatedByType',
+ 'CredentialsType',
+ 'DataBindingMode',
+ 'DatasetType',
+ 'DeploymentProvisioningState',
+ 'DeploymentType',
+ 'DistributionType',
+ 'DockerSpecificationType',
+ 'EarlyTerminationPolicyType',
+ 'EncryptionStatus',
+ 'EndpointAuthModeType',
+ 'EndpointComputeType',
+ 'EndpointProvisioningState',
+ 'EnvironmentSpecificationType',
+ 'ExportFormatType',
+ 'IdentityType',
+ 'ImageAnnotationType',
+ 'JobProvisioningState',
+ 'JobStatus',
+ 'JobType',
+ 'KeyType',
+ 'MediaType',
+ 'NodeState',
+ 'OperatingSystemType',
+ 'OperationName',
+ 'OperationStatus',
+ 'OptimizationMetric',
+ 'OrderString',
+ 'OriginType',
+ 'OsType',
+ 'OsTypes',
+ 'ParameterSamplingType',
+ 'PipelineType',
+ 'PrimaryMetricGoal',
+ 'PrivateEndpointConnectionProvisioningState',
+ 'PrivateEndpointServiceConnectionStatus',
+ 'ProvisioningState',
+ 'QuotaUnit',
+ 'ReasonCode',
+ 'ReferenceType',
+ 'RemoteLoginPortPublicAccess',
+ 'ResourceIdentityAssignment',
+ 'ResourceIdentityType',
+ 'ScaleTypeMode',
+ 'SshPublicAccess',
+ 'SslConfigurationStatus',
+ 'Status',
+ 'StatusMessageLevel',
+ 'TaskType',
+ 'TextAnnotationType',
+ 'UnderlyingResourceAction',
+ 'UnitOfMeasure',
+ 'UsageUnit',
+ 'VariantType',
+ 'VmPriceOsType',
+ 'VmPriority',
+ 'VmTier',
+ 'WebServiceState',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000000..06725e09486
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,596 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass that makes member lookup case-insensitive.

    Generated enum members are declared in UPPER_SNAKE_CASE, so both item
    access (``SomeEnum['steady']``) and attribute access are normalized with
    ``name.upper()`` before delegating to the regular EnumMeta lookup.
    """

    def __getitem__(self, name):
        # Case-insensitive SomeEnum['name'] lookup; assumes `name` is a str.
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`.

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            # Re-raise as AttributeError so the normal attribute-access
            # protocol (hasattr/getattr with default) keeps working.
            raise AttributeError(name)
+
+
+class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Allocation state of the compute. Possible values are: steady - Indicates that the compute is
+ not resizing. There are no changes to the number of compute nodes in the compute in progress. A
+ compute enters this state when it is created and when no operations are being performed on the
+ compute to change the number of compute nodes. resizing - Indicates that the compute is
+ resizing; that is, compute nodes are being added to or removed from the compute.
+ """
+
+ STEADY = "Steady"
+ RESIZING = "Resizing"
+
+class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Policy for sharing applications on this compute instance among users of parent workspace. If
+ Personal, only the creator can access applications on this compute instance. When Shared, any
+ workspace user can access applications on this instance depending on his/her assigned role.
+ """
+
+ PERSONAL = "Personal"
+ SHARED = "Shared"
+
+class AssetGenerator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The target that initiated generation of this asset
+ """
+
+ USER = "User"
+ SYSTEM = "System"
+
+class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Three lettered code specifying the currency of the VM price. Example: USD
+ """
+
+ USD = "USD"
+
class ComponentInputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known component input type values."""

    GENERIC = "Generic"
    RANGED_NUMBER = "RangedNumber"
    ENUM = "Enum"
+
class ComponentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known component type values."""

    COMMAND_COMPONENT = "CommandComponent"
+
+class ComputeEnvironmentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The compute environment type for the service.
+ """
+
+ ACI = "ACI"
+ AKS = "AKS"
+
+class ComputeInstanceAuthorizationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The Compute Instance Authorization type. Available values are personal (default).
+ """
+
+ PERSONAL = "personal"
+
+class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Current state of an ComputeInstance.
+ """
+
+ CREATING = "Creating"
+ CREATE_FAILED = "CreateFailed"
+ DELETING = "Deleting"
+ RUNNING = "Running"
+ RESTARTING = "Restarting"
+ JOB_RUNNING = "JobRunning"
+ SETTING_UP = "SettingUp"
+ SETUP_FAILED = "SetupFailed"
+ STARTING = "Starting"
+ STOPPED = "Stopped"
+ STOPPING = "Stopping"
+ USER_SETTING_UP = "UserSettingUp"
+ USER_SETUP_FAILED = "UserSetupFailed"
+ UNKNOWN = "Unknown"
+ UNUSABLE = "Unusable"
+
+class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of compute
+ """
+
+ AKS = "AKS"
+ AML_COMPUTE = "AmlCompute"
+ COMPUTE_INSTANCE = "ComputeInstance"
+ DATA_FACTORY = "DataFactory"
+ VIRTUAL_MACHINE = "VirtualMachine"
+ HD_INSIGHT = "HDInsight"
+ DATABRICKS = "Databricks"
+ DATA_LAKE_ANALYTICS = "DataLakeAnalytics"
+
class ContainerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known container type values."""

    STORAGE_INITIALIZER = "StorageInitializer"
    INFERENCE_SERVER = "InferenceServer"
+
class ContentsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known datastore contents type values (kind of backing storage)."""

    AZURE_BLOB = "AzureBlob"
    AZURE_DATA_LAKE = "AzureDataLake"
    AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
    AZURE_FILE = "AzureFile"
    AZURE_MY_SQL = "AzureMySql"
    AZURE_POSTGRE_SQL = "AzurePostgreSql"
    AZURE_SQL_DATABASE = "AzureSqlDatabase"
    GLUSTER_FS = "GlusterFs"
+
+class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that created the resource.
+ """
+
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
+
class CredentialsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known datastore credentials type values."""

    ACCOUNT_KEY = "AccountKey"
    CERTIFICATE = "Certificate"
    NONE = "None"
    SAS = "Sas"
    SERVICE_PRINCIPAL = "ServicePrincipal"
    SQL_ADMIN = "SqlAdmin"
+
+class DataBindingMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Describes how the data should be attached to the container.
+ """
+
+ MOUNT = "Mount"
+ DOWNLOAD = "Download"
+ UPLOAD = "Upload"
+
class DatasetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known dataset type values."""

    SIMPLE = "Simple"
    DATAFLOW = "Dataflow"
+
class DeploymentProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known provisioning states of a deployment."""

    CREATING = "Creating"
    DELETING = "Deleting"
    SCALING = "Scaling"
    UPDATING = "Updating"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    CANCELED = "Canceled"
+
+class DeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The deployment type for the service.
+ """
+
+ GRPC_REALTIME_ENDPOINT = "GRPCRealtimeEndpoint"
+ HTTP_REALTIME_ENDPOINT = "HttpRealtimeEndpoint"
+ BATCH = "Batch"
+
class DistributionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known distribution configuration type values."""

    PY_TORCH = "PyTorch"
    TENSOR_FLOW = "TensorFlow"
    MPI = "Mpi"
+
+class DockerSpecificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Docker specification must be either Build or Image
+ """
+
+ BUILD = "Build"
+ IMAGE = "Image"
+
class EarlyTerminationPolicyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known early-termination policy type values."""

    BANDIT = "Bandit"
    MEDIAN_STOPPING = "MedianStopping"
    TRUNCATION_SELECTION = "TruncationSelection"
+
+class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Indicates whether or not the encryption is enabled for the workspace.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
class EndpointAuthModeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known endpoint authentication mode values."""

    AML_TOKEN = "AMLToken"
    KEY = "Key"
    AAD_TOKEN = "AADToken"
+
class EndpointComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known endpoint compute type values."""

    MANAGED = "Managed"
    AKS = "AKS"
    AZURE_ML_COMPUTE = "AzureMLCompute"
+
+class EndpointProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of endpoint provisioning.
+ """
+
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ UPDATING = "Updating"
+ CANCELED = "Canceled"
+
+class EnvironmentSpecificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Environment specification is either user created or curated by Azure ML service
+ """
+
+ CURATED = "Curated"
+ USER_CREATED = "UserCreated"
+
+class ExportFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The format of exported labels.
+ """
+
+ DATASET = "Dataset"
+ COCO = "Coco"
+ CSV = "CSV"
+
class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known identity configuration type values."""

    MANAGED = "Managed"
    SERVICE_PRINCIPAL = "ServicePrincipal"
    AML_TOKEN = "AMLToken"
+
+class ImageAnnotationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Annotation type of image data.
+ """
+
+ CLASSIFICATION = "Classification"
+ BOUNDING_BOX = "BoundingBox"
+ INSTANCE_SEGMENTATION = "InstanceSegmentation"
+
class JobProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known provisioning states of a job."""

    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    CANCELED = "Canceled"
    IN_PROGRESS = "InProgress"
+
+class JobStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The status of a job.
+ """
+
+ NOT_STARTED = "NotStarted"
+ STARTING = "Starting"
+ PROVISIONING = "Provisioning"
+ PREPARING = "Preparing"
+ QUEUED = "Queued"
+ RUNNING = "Running"
+ FINALIZING = "Finalizing"
+ CANCEL_REQUESTED = "CancelRequested"
+ COMPLETED = "Completed"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+ NOT_RESPONDING = "NotResponding"
+ PAUSED = "Paused"
+
class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known job type values."""

    COMMAND = "Command"
    SWEEP = "Sweep"
    LABELING = "Labeling"
    PIPELINE = "Pipeline"
    DATA = "Data"
    AUTO_ML = "AutoML"
+
class KeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known key type values."""

    PRIMARY = "Primary"
    SECONDARY = "Secondary"
+
+class MediaType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Media type of data asset.
+ """
+
+ IMAGE = "Image"
+ TEXT = "Text"
+
+class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the compute node. Values are idle, running, preparing, unusable, leaving and
+ preempted.
+ """
+
+ IDLE = "idle"
+ RUNNING = "running"
+ PREPARING = "preparing"
+ UNUSABLE = "unusable"
+ LEAVING = "leaving"
+ PREEMPTED = "preempted"
+
+class OperatingSystemType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of operating system.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Name of the last operation.
+ """
+
+ CREATE = "Create"
+ START = "Start"
+ STOP = "Stop"
+ RESTART = "Restart"
+ REIMAGE = "Reimage"
+ DELETE = "Delete"
+
+class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operation status.
+ """
+
+ IN_PROGRESS = "InProgress"
+ SUCCEEDED = "Succeeded"
+ CREATE_FAILED = "CreateFailed"
+ START_FAILED = "StartFailed"
+ STOP_FAILED = "StopFailed"
+ RESTART_FAILED = "RestartFailed"
+ REIMAGE_FAILED = "ReimageFailed"
+ DELETE_FAILED = "DeleteFailed"
+
class OptimizationMetric(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known optimization metric values."""

    AUC_WEIGHTED = "AUC_weighted"
    ACCURACY = "Accuracy"
    NORM_MACRO_RECALL = "Norm_macro_recall"
    AVERAGE_PRECISION_SCORE_WEIGHTED = "Average_precision_score_weighted"
    PRECISION_SCORE_WEIGHTED = "Precision_score_weighted"
    SPEARMAN_CORRELATION = "Spearman_correlation"
    NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "Normalized_root_mean_squared_error"
    R2_SCORE = "R2_score"
    NORMALIZED_MEAN_ABSOLUTE_ERROR = "Normalized_mean_absolute_error"
    NORMALIZED_ROOT_MEAN_SQUARED_LOG_ERROR = "Normalized_root_mean_squared_log_error"
+
class OrderString(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known sort-order values for list operations."""

    CREATED_AT_DESC = "CreatedAtDesc"
    CREATED_AT_ASC = "CreatedAtAsc"
    UPDATED_AT_DESC = "UpdatedAtDesc"
    UPDATED_AT_ASC = "UpdatedAtAsc"
+
class OriginType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known origin type values."""

    SYNAPSE = "Synapse"
+
+class OsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Compute OS Type
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
class OsTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known operating system type values."""

    LINUX = "Linux"
    WINDOWS = "Windows"
+
class ParameterSamplingType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known parameter sampling type values."""

    GRID = "Grid"
    RANDOM = "Random"
    BAYESIAN = "Bayesian"
+
class PipelineType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known pipeline type values."""

    AZURE_ML = "AzureML"
+
+class PrimaryMetricGoal(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Defines supported metric goals for hyperparameter tuning
+ """
+
+ MINIMIZE = "Minimize"
+ MAXIMIZE = "Maximize"
+
+class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current provisioning state.
+ """
+
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ FAILED = "Failed"
+
+class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The private endpoint connection status.
+ """
+
+ PENDING = "Pending"
+ APPROVED = "Approved"
+ REJECTED = "Rejected"
+ DISCONNECTED = "Disconnected"
+ TIMEOUT = "Timeout"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current deployment state of workspace resource. The provisioningState is to indicate states
+ for resource provisioning.
+ """
+
+ UNKNOWN = "Unknown"
+ UPDATING = "Updating"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of quota measurement.
+ """
+
+ COUNT = "Count"
+
+class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The reason for the restriction.
+ """
+
+ NOT_SPECIFIED = "NotSpecified"
+ NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion"
+ NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
+
class ReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known asset reference type values."""

    ID = "Id"
    DATA_PATH = "DataPath"
    OUTPUT_PATH = "OutputPath"
+
+class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is
+ open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed
+ on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be
+ default only during cluster creation time, after creation it will be either enabled or
+ disabled.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+ NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityAssignment(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Defines values for a ResourceIdentity's type.
+ """
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ NONE = "None"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The identity type.
+ """
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ NONE = "None"
+
class ScaleTypeMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known scale mode values."""

    AUTOMATIC = "Automatic"
    MANUAL = "Manual"
    NONE = "None"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on this instance. Enabled - Indicates that the public ssh port is open and
+ accessible according to the VNet/subnet policy if applicable.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enable or disable ssl for scoring
+ """
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+ AUTO = "Auto"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Status of update workspace quota.
+ """
+
+ UNDEFINED = "Undefined"
+ SUCCESS = "Success"
+ FAILURE = "Failure"
+ INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+ INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+ INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+ OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+ OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
class StatusMessageLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known severity levels of a status message."""

    ERROR = "Error"
    INFORMATION = "Information"
    WARNING = "Warning"
+
+class TaskType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Experiment Task type.
+ """
+
+ CLASSIFICATION = "Classification"
+ REGRESSION = "Regression"
+ FORECASTING = "Forecasting"
+
+class TextAnnotationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Annotation type of text data.
+ """
+
+ CLASSIFICATION = "Classification"
+
class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Known underlying resource action values."""

    DELETE = "Delete"
    DETACH = "Detach"
+
+class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The unit of time measurement for the specified VM price. Example: OneHour
+ """
+
+ ONE_HOUR = "OneHour"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of usage measurement.
+ """
+
+ COUNT = "Count"
+
+class VariantType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the variant.
+ """
+
+ CONTROL = "Control"
+ TREATMENT = "Treatment"
+
+class VmPriceOsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operating system type used by the VM.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Virtual Machine priority
+ """
+
+ DEDICATED = "Dedicated"
+ LOW_PRIORITY = "LowPriority"
+
+class VmTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the VM.
+ """
+
+ STANDARD = "Standard"
+ LOW_PRIORITY = "LowPriority"
+ SPOT = "Spot"
+
+class WebServiceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current state of the service.
+ """
+
+ TRANSITIONING = "Transitioning"
+ HEALTHY = "Healthy"
+ UNHEALTHY = "Unhealthy"
+ FAILED = "Failed"
+ UNSCHEDULABLE = "Unschedulable"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..fcac0d83e5c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,12238 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class AccountKeySection(msrest.serialization.Model):
+ """Credential section holding a storage account key.
+
+ :param key: Storage account key.
+ :type key: str
+ """
+
+ # Python attribute -> REST wire name/type mapping used by msrest for (de)serialization.
+ _attribute_map = {
+ 'key': {'key': 'key', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AccountKeySection, self).__init__(**kwargs)
+ self.key = kwargs.get('key', None)
+
+
+class CreateServiceRequest(msrest.serialization.Model):
+ """The base class for creating a service.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceCreateRequest, CreateEndpointVariantRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ """
+
+ # Constraints msrest enforces when serializing outbound requests.
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: the wire value of computeType selects the concrete subclass.
+ # NOTE(review): 'Custom' maps to CreateEndpointVariantRequest although the docstring lists
+ # only "ACI"/"AKS" -- generated from the service spec; confirm against the swagger.
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceCreateRequest', 'Custom': 'CreateEndpointVariantRequest'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequest, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.kv_tags = kwargs.get('kv_tags', None)
+ self.properties = kwargs.get('properties', None)
+ self.keys = kwargs.get('keys', None)
+ # Discriminator is constant per subclass; each subclass overwrites this in its __init__.
+ self.compute_type = None # type: Optional[str]
+ self.environment_image_request = kwargs.get('environment_image_request', None)
+ self.location = kwargs.get('location', None)
+
+
+class AciServiceCreateRequest(CreateServiceRequest):
+ """Request to create a service hosted on Azure Container Instances (computeType "ACI").
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CName for the service.
+ :type cname: str
+ :param dns_name_label: The Dns label for the service.
+ :type dns_name_label: str
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ # Constraints msrest enforces when serializing outbound requests.
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequest, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ # Unlike the response model, the create request defaults these flags to False
+ # rather than None when the caller omits them.
+ self.auth_enabled = kwargs.get('auth_enabled', False)
+ self.ssl_enabled = kwargs.get('ssl_enabled', False)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', False)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.ssl_certificate = kwargs.get('ssl_certificate', None)
+ self.ssl_key = kwargs.get('ssl_key', None)
+ self.cname = kwargs.get('cname', None)
+ self.dns_name_label = kwargs.get('dns_name_label', None)
+ self.vnet_configuration = kwargs.get('vnet_configuration', None)
+ self.encryption_properties = kwargs.get('encryption_properties', None)
+
+
+class ModelDataCollection(msrest.serialization.Model):
+ """The Model data collection properties.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ # Python attribute -> REST wire name/type mapping used by msrest for (de)serialization.
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDataCollection, self).__init__(**kwargs)
+ self.event_hub_enabled = kwargs.get('event_hub_enabled', None)
+ self.storage_enabled = kwargs.get('storage_enabled', None)
+
+
+class AciServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ Behaviorally identical to ModelDataCollection; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestDataCollection, self).__init__(**kwargs)
+
+
+class EncryptionProperties(msrest.serialization.Model):
+ """Customer-managed key encryption settings (Key Vault key reference).
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The base URL of the Key Vault.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ # Constraints msrest enforces when serializing outbound requests.
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EncryptionProperties, self).__init__(**kwargs)
+ # Required: direct indexing raises KeyError when the caller omits any of these.
+ self.vault_base_url = kwargs['vault_base_url']
+ self.key_name = kwargs['key_name']
+ self.key_version = kwargs['key_version']
+
+
+class AciServiceCreateRequestEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ Behaviorally identical to EncryptionProperties; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The base URL of the Key Vault.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestEncryptionProperties, self).__init__(**kwargs)
+
+
+class VnetConfiguration(msrest.serialization.Model):
+ """Virtual network settings (network and subnet names) for a service.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ # Python attribute -> REST wire name/type mapping used by msrest for (de)serialization.
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VnetConfiguration, self).__init__(**kwargs)
+ self.vnet_name = kwargs.get('vnet_name', None)
+ self.subnet_name = kwargs.get('subnet_name', None)
+
+
+class AciServiceCreateRequestVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ Behaviorally identical to VnetConfiguration; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestVnetConfiguration, self).__init__(**kwargs)
+
+
+class ServiceResponseBase(msrest.serialization.Model):
+ """The base service response. The correct inherited response based on computeType will be returned (ex. ACIServiceResponse).
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceResponse, AksVariantResponse.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ """
+
+ # readonly fields are server-populated; msrest skips them on serialization.
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: the wire value of computeType selects the concrete subclass.
+ # NOTE(review): 'Custom' maps to AksVariantResponse although the docstring lists only
+ # "ACI"/"AKS" -- generated from the service spec; confirm against the swagger.
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceResponse', 'Custom': 'AksVariantResponse'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResponseBase, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.kv_tags = kwargs.get('kv_tags', None)
+ self.properties = kwargs.get('properties', None)
+ # Server-populated (readonly) fields are initialized to None and never sent.
+ self.state = None
+ self.error = None
+ # Discriminator is constant per subclass; each subclass overwrites this in its __init__.
+ self.compute_type = None # type: Optional[str]
+ self.deployment_type = kwargs.get('deployment_type', None)
+
+
+class AciServiceResponse(ServiceResponseBase):
+ """The response for an ACI service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :ivar scoring_uri: The Uri for sending scoring requests.
+ :vartype scoring_uri: str
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CName for the service.
+ :type cname: str
+ :param public_ip: The public IP address for the service.
+ :type public_ip: str
+ :param public_fqdn: The public Fqdn for the service.
+ :type public_fqdn: str
+ :ivar swagger_uri: The Uri for sending swagger requests.
+ :vartype swagger_uri: str
+ :ivar model_config_map: Details on the models and configurations.
+ :vartype model_config_map: dict[str, object]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment_image_request: The Environment, models and assets used for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ # readonly fields are server-populated; msrest skips them on serialization.
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ 'scoring_uri': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ 'model_config_map': {'readonly': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'public_ip': {'key': 'publicIp', 'type': 'str'},
+ 'public_fqdn': {'key': 'publicFqdn', 'type': 'str'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponse, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ # Server-populated (readonly) field; never sent.
+ self.scoring_uri = None
+ self.location = kwargs.get('location', None)
+ self.auth_enabled = kwargs.get('auth_enabled', None)
+ self.ssl_enabled = kwargs.get('ssl_enabled', None)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.ssl_certificate = kwargs.get('ssl_certificate', None)
+ self.ssl_key = kwargs.get('ssl_key', None)
+ self.cname = kwargs.get('cname', None)
+ self.public_ip = kwargs.get('public_ip', None)
+ self.public_fqdn = kwargs.get('public_fqdn', None)
+ # Server-populated (readonly) fields; never sent.
+ self.swagger_uri = None
+ self.model_config_map = None
+ self.models = kwargs.get('models', None)
+ self.environment_image_request = kwargs.get('environment_image_request', None)
+ self.vnet_configuration = kwargs.get('vnet_configuration', None)
+ self.encryption_properties = kwargs.get('encryption_properties', None)
+
+
+class AciServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ Behaviorally identical to ModelDataCollection; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseDataCollection, self).__init__(**kwargs)
+
+
+class AciServiceResponseEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ Behaviorally identical to EncryptionProperties; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The base URL of the Key Vault.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseEncryptionProperties, self).__init__(**kwargs)
+
+
+class EnvironmentImageResponse(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ NOTE(review): the generated docstring says "Request" although the class is named
+ *Response* -- wording comes from the service spec; confirm against the swagger.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ # Python attribute -> REST wire name/type mapping used by msrest for (de)serialization.
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageResponse, self).__init__(**kwargs)
+ self.driver_program = kwargs.get('driver_program', None)
+ self.assets = kwargs.get('assets', None)
+ self.model_ids = kwargs.get('model_ids', None)
+ self.models = kwargs.get('models', None)
+ self.environment = kwargs.get('environment', None)
+ self.environment_reference = kwargs.get('environment_reference', None)
+
+
+class AciServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ Behaviorally identical to EnvironmentImageResponse; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseEnvironmentImageRequest, self).__init__(**kwargs)
+
+
+class AciServiceResponseVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ Behaviorally identical to VnetConfiguration; appears to exist only so the
+ generator can give the inline schema its own name.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseVnetConfiguration, self).__init__(**kwargs)
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ """
+
+ # readonly fields are server-populated; msrest skips them on serialization.
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ # Polymorphic discriminator: the wire value of computeType selects the concrete subclass.
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ # Discriminator is constant per subclass; each subclass overwrites this in its __init__.
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = kwargs.get('compute_location', None)
+ # Server-populated (readonly) fields are initialized to None and never sent.
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ # readonly fields are server-populated; msrest skips them on serialization.
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ # Python attribute -> REST wire name/type mapping for (de)serialization.
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Aks, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'AKS' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeConfiguration(msrest.serialization.Model):
+ """ComputeConfiguration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeConfiguration, AzureMlComputeConfiguration, ManagedComputeConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+ "AKS", "AzureMLCompute".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeConfiguration', 'AzureMLCompute': 'AzureMlComputeConfiguration', 'Managed': 'ManagedComputeConfiguration'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeConfiguration, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeConfiguration(ComputeConfiguration):
+ """AksComputeConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+ "AKS", "AzureMLCompute".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param namespace:
+ :type namespace: str
+ :param compute_name: Required.
+ :type compute_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'compute_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeConfiguration, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.namespace = kwargs.get('namespace', None)
+ self.compute_name = kwargs['compute_name']
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+
+
+class DeploymentConfigurationBase(msrest.serialization.Model):
+ """DeploymentConfigurationBase.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksDeploymentConfiguration, ManagedDeploymentConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+ "AKS", "AzureMLCompute".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param app_insights_enabled:
+ :type app_insights_enabled: bool
+ :param max_concurrent_requests_per_instance:
+ :type max_concurrent_requests_per_instance: int
+ :param max_queue_wait_ms:
+ :type max_queue_wait_ms: int
+ :param scoring_timeout_ms:
+ :type scoring_timeout_ms: int
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksDeploymentConfiguration', 'Managed': 'ManagedDeploymentConfiguration'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DeploymentConfigurationBase, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance', None)
+ self.max_queue_wait_ms = kwargs.get('max_queue_wait_ms', None)
+ self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
+ self.liveness_probe_requirements = kwargs.get('liveness_probe_requirements', None)
+
+
+class AksDeploymentConfiguration(DeploymentConfigurationBase):
+ """AksDeploymentConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+ "AKS", "AzureMLCompute".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param app_insights_enabled:
+ :type app_insights_enabled: bool
+ :param max_concurrent_requests_per_instance:
+ :type max_concurrent_requests_per_instance: int
+ :param max_queue_wait_ms:
+ :type max_queue_wait_ms: int
+ :param scoring_timeout_ms:
+ :type scoring_timeout_ms: int
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param container_resource_requirements: The resource requirements for the container (cpu and
+ memory).
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param model_data_collection: The Model data collection properties.
+ :type model_data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'model_data_collection': {'key': 'modelDataCollection', 'type': 'ModelDataCollection'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksDeploymentConfiguration, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.model_data_collection = kwargs.get('model_data_collection', None)
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advance configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = kwargs.get('subnet_id', None)
+ self.service_cidr = kwargs.get('service_cidr', None)
+ self.dns_service_ip = kwargs.get('dns_service_ip', None)
+ self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None)
+
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param cluster_fqdn: Cluster full qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = kwargs.get('cluster_fqdn', None)
+ self.system_services = None
+ self.agent_count = kwargs.get('agent_count', None)
+ self.agent_vm_size = kwargs.get('agent_vm_size', None)
+ self.ssl_configuration = kwargs.get('ssl_configuration', None)
+ self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None)
+
+
+class AksReplicaStatus(msrest.serialization.Model):
+ """AksReplicaStatus.
+
+ :param desired_replicas: The desired number of replicas.
+ :type desired_replicas: int
+ :param updated_replicas: The number of updated replicas.
+ :type updated_replicas: int
+ :param available_replicas: The number of available replicas.
+ :type available_replicas: int
+ :param error: The error details.
+ :type error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _attribute_map = {
+ 'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+ 'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+ 'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksReplicaStatus, self).__init__(**kwargs)
+ self.desired_replicas = kwargs.get('desired_replicas', None)
+ self.updated_replicas = kwargs.get('updated_replicas', None)
+ self.available_replicas = kwargs.get('available_replicas', None)
+ self.error = kwargs.get('error', None)
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Error response information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.details = None
+
+
+class AksReplicaStatusError(ErrorResponse):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksReplicaStatusError, self).__init__(**kwargs)
+
+
+class CreateEndpointVariantRequest(CreateServiceRequest):
+ """The Variant properties.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksServiceCreateRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksServiceCreateRequest'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateEndpointVariantRequest, self).__init__(**kwargs)
+ self.compute_type = 'Custom' # type: str
+ self.is_default = kwargs.get('is_default', None)
+ self.traffic_percentile = kwargs.get('traffic_percentile', None)
+ self.type = kwargs.get('type', None)
+
+
+class AksServiceCreateRequest(CreateEndpointVariantRequest):
+ """The request to create an AKS service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service.Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ :param num_replicas: The number of replicas on the cluster.
+ :type num_replicas: int
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param compute_name: The name of the compute resource.
+ :type compute_name: str
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param auto_scaler: The auto scaler properties.
+ :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+ container.
+ :type max_concurrent_requests_per_container: int
+ :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+ After this time, the service will return 503 (Service Unavailable).
+ :type max_queue_wait_ms: int
+ :param namespace: Kubernetes namespace for the service.
+ :type namespace: str
+ :param scoring_timeout_ms: The scoring timeout in milliseconds.
+ :type scoring_timeout_ms: int
+ :param auth_enabled: Whether or not authentication is enabled.
+ :type auth_enabled: bool
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+ :type aad_auth_enabled: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequest, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.num_replicas = kwargs.get('num_replicas', None)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.compute_name = kwargs.get('compute_name', None)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.auto_scaler = kwargs.get('auto_scaler', None)
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
+ self.max_queue_wait_ms = kwargs.get('max_queue_wait_ms', None)
+ self.namespace = kwargs.get('namespace', None)
+ self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
+ self.auth_enabled = kwargs.get('auth_enabled', None)
+ self.liveness_probe_requirements = kwargs.get('liveness_probe_requirements', None)
+ self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
+
+
+class AutoScaler(msrest.serialization.Model):
+ """The Auto Scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaler, self).__init__(**kwargs)
+ self.autoscale_enabled = kwargs.get('autoscale_enabled', None)
+ self.min_replicas = kwargs.get('min_replicas', None)
+ self.max_replicas = kwargs.get('max_replicas', None)
+ self.target_utilization = kwargs.get('target_utilization', None)
+ self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
+
+
+class AksServiceCreateRequestAutoScaler(AutoScaler):
+ """The auto scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestAutoScaler, self).__init__(**kwargs)
+
+
+class AksServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestDataCollection, self).__init__(**kwargs)
+
+
+class LivenessProbeRequirements(msrest.serialization.Model):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LivenessProbeRequirements, self).__init__(**kwargs)
+ self.failure_threshold = kwargs.get('failure_threshold', None)
+ self.success_threshold = kwargs.get('success_threshold', None)
+ self.timeout_seconds = kwargs.get('timeout_seconds', None)
+ self.period_seconds = kwargs.get('period_seconds', None)
+ self.initial_delay_seconds = kwargs.get('initial_delay_seconds', None)
+
+
+class AksServiceCreateRequestLivenessProbeRequirements(LivenessProbeRequirements):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestLivenessProbeRequirements, self).__init__(**kwargs)
+
+
+class AksVariantResponse(ServiceResponseBase):
+    """The response for an AKS variant.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksServiceResponse.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    """
+
+    # Constraints enforced by msrest: readonly fields are server-populated and
+    # stripped from requests; compute_type is the required discriminator.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+    }
+
+    # Python attribute name -> REST wire name/type, read reflectively by the
+    # msrest (de)serializer.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    # Polymorphic deserialization: the wire value of computeType selects the
+    # concrete subclass.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksServiceResponse'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksVariantResponse, self).__init__(**kwargs)
+        # NOTE(review): the discriminator is set to 'Custom', which matches
+        # neither the documented values ("ACI", "AKS") nor the 'AKS' key in
+        # _subtype_map above. This appears to be faithful autorest output;
+        # confirm against the service swagger before changing it.
+        self.compute_type = 'Custom'  # type: str
+        self.is_default = kwargs.get('is_default', None)
+        self.traffic_percentile = kwargs.get('traffic_percentile', None)
+        self.type = kwargs.get('type', None)
+
+class AksServiceResponse(AksVariantResponse):
+    """The response for an AKS service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    :param models: The list of models.
+    :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param container_resource_requirements: The container resource requirements.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+     container.
+    :type max_concurrent_requests_per_container: int
+    :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+     After this time, the service will return 503 (Service Unavailable).
+    :type max_queue_wait_ms: int
+    :param compute_name: The name of the compute resource.
+    :type compute_name: str
+    :param namespace: The Kubernetes namespace of the deployment.
+    :type namespace: str
+    :param num_replicas: The number of replicas on the cluster.
+    :type num_replicas: int
+    :param data_collection: Details of the data collection options specified.
+    :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    :param app_insights_enabled: Whether or not Application Insights is enabled.
+    :type app_insights_enabled: bool
+    :param auto_scaler: The auto scaler properties.
+    :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+    :ivar scoring_uri: The Uri for sending scoring requests.
+    :vartype scoring_uri: str
+    :ivar deployment_status: The deployment status.
+    :vartype deployment_status: ~azure_machine_learning_workspaces.models.AksReplicaStatus
+    :param scoring_timeout_ms: The scoring timeout in milliseconds.
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param auth_enabled: Whether or not authentication is enabled.
+    :type auth_enabled: bool
+    :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+    :type aad_auth_enabled: bool
+    :ivar swagger_uri: The Uri for sending swagger requests.
+    :vartype swagger_uri: str
+    :ivar model_config_map: Details on the models and configurations.
+    :vartype model_config_map: dict[str, object]
+    :param environment_image_request: The Environment, models and assets used for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+    """
+
+    # Server-populated fields are readonly (ignored on requests); compute_type
+    # is the required polymorphic discriminator.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+        'scoring_uri': {'readonly': True},
+        'deployment_status': {'readonly': True},
+        'swagger_uri': {'readonly': True},
+        'model_config_map': {'readonly': True},
+    }
+
+    # Python attribute name -> REST wire name/type for msrest (de)serialization.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+        'models': {'key': 'models', 'type': '[Model]'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'compute_name': {'key': 'computeName', 'type': 'str'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+        'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+        'deployment_status': {'key': 'deploymentStatus', 'type': 'AksReplicaStatus'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+        'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+        'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponse, self).__init__(**kwargs)
+        # Pin the discriminator after super() so the base class cannot
+        # overwrite it with its own value.
+        self.compute_type = 'AKS'  # type: str
+        self.models = kwargs.get('models', None)
+        self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+        self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
+        self.max_queue_wait_ms = kwargs.get('max_queue_wait_ms', None)
+        self.compute_name = kwargs.get('compute_name', None)
+        self.namespace = kwargs.get('namespace', None)
+        self.num_replicas = kwargs.get('num_replicas', None)
+        self.data_collection = kwargs.get('data_collection', None)
+        self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+        self.auto_scaler = kwargs.get('auto_scaler', None)
+        # Readonly (server-populated) fields start as None and are filled in
+        # during deserialization only.
+        self.scoring_uri = None
+        self.deployment_status = None
+        self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
+        self.liveness_probe_requirements = kwargs.get('liveness_probe_requirements', None)
+        self.auth_enabled = kwargs.get('auth_enabled', None)
+        self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
+        self.swagger_uri = None
+        self.model_config_map = None
+        self.environment_image_request = kwargs.get('environment_image_request', None)
+
+class AksServiceResponseAutoScaler(AutoScaler):
+    """The auto scaler properties.
+
+    :param autoscale_enabled: Option to enable/disable auto scaling.
+    :type autoscale_enabled: bool
+    :param min_replicas: The minimum number of replicas to scale down to.
+    :type min_replicas: int
+    :param max_replicas: The maximum number of replicas in the cluster.
+    :type max_replicas: int
+    :param target_utilization: The target utilization percentage to use for determining whether to
+     scale the cluster.
+    :type target_utilization: int
+    :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+    :type refresh_period_in_seconds: int
+    """
+
+    # Generated alias of AutoScaler so the AKS response schema gets its own
+    # named type; adds no fields or behavior of its own.
+    _attribute_map = {
+        'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponseAutoScaler, self).__init__(**kwargs)
+
+class AksServiceResponseDataCollection(ModelDataCollection):
+    """Details of the data collection options specified.
+
+    :param event_hub_enabled: Option for enabling/disabling Event Hub.
+    :type event_hub_enabled: bool
+    :param storage_enabled: Option for enabling/disabling storage.
+    :type storage_enabled: bool
+    """
+
+    # Generated alias of ModelDataCollection for the AKS response schema;
+    # adds no fields or behavior of its own.
+    _attribute_map = {
+        'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+        'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponseDataCollection, self).__init__(**kwargs)
+
+class AksServiceResponseDeploymentStatus(AksReplicaStatus):
+    """The deployment status.
+
+    :param desired_replicas: The desired number of replicas.
+    :type desired_replicas: int
+    :param updated_replicas: The number of updated replicas.
+    :type updated_replicas: int
+    :param available_replicas: The number of available replicas.
+    :type available_replicas: int
+    :param error: The error details.
+    :type error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    """
+
+    # Generated alias of AksReplicaStatus for the AKS response schema;
+    # adds no fields or behavior of its own.
+    _attribute_map = {
+        'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+        'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+        'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponseDeploymentStatus, self).__init__(**kwargs)
+
+class AksServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+    """The Environment, models and assets used for inferencing.
+
+    :param driver_program: The name of the driver file.
+    :type driver_program: str
+    :param assets: The list of assets.
+    :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+    :param model_ids: The list of model Ids.
+    :type model_ids: list[str]
+    :param models: The list of models.
+    :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param environment: The details of the AZURE ML environment.
+    :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+    :param environment_reference: The unique identifying details of the AZURE ML environment.
+    :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+    """
+
+    # Generated alias of EnvironmentImageResponse for the AKS response schema;
+    # adds no fields or behavior of its own.
+    _attribute_map = {
+        'driver_program': {'key': 'driverProgram', 'type': 'str'},
+        'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+        'model_ids': {'key': 'modelIds', 'type': '[str]'},
+        'models': {'key': 'models', 'type': '[Model]'},
+        'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+        'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponseEnvironmentImageRequest, self).__init__(**kwargs)
+
+class AksServiceResponseLivenessProbeRequirements(LivenessProbeRequirements):
+    """The liveness probe requirements.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout_seconds: The probe timeout in seconds.
+    :type timeout_seconds: int
+    :param period_seconds: The length of time between probes in seconds.
+    :type period_seconds: int
+    :param initial_delay_seconds: The delay before the first probe in seconds.
+    :type initial_delay_seconds: int
+    """
+
+    # Generated alias of LivenessProbeRequirements for the AKS response schema;
+    # adds no fields or behavior of its own.
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+        'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+        'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksServiceResponseLivenessProbeRequirements, self).__init__(**kwargs)
+
+class AmlCompute(Compute):
+    """An Azure Machine Learning compute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param properties: AML Compute properties.
+    :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+    """
+
+    # Server-populated fields are readonly; compute_type is the required
+    # polymorphic discriminator inherited from Compute.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # Python attribute name -> REST wire name/type for msrest (de)serialization.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlCompute, self).__init__(**kwargs)
+        # Pin the discriminator after super() so the base cannot overwrite it.
+        self.compute_type = 'AmlCompute'  # type: str
+        self.properties = kwargs.get('properties', None)
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+    """Compute node information related to a AmlCompute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar node_id: ID of the compute node.
+    :vartype node_id: str
+    :ivar private_ip_address: Private IP address of the compute node.
+    :vartype private_ip_address: str
+    :ivar public_ip_address: Public IP address of the compute node.
+    :vartype public_ip_address: str
+    :ivar port: SSH port number of the node.
+    :vartype port: int
+    :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+     leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+     "leaving", "preempted".
+    :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+    :ivar run_id: ID of the Experiment running on the node, if any else null.
+    :vartype run_id: str
+    """
+
+    # Every field is server-populated: this model is response-only and nothing
+    # here is serialized into requests.
+    _validation = {
+        'node_id': {'readonly': True},
+        'private_ip_address': {'readonly': True},
+        'public_ip_address': {'readonly': True},
+        'port': {'readonly': True},
+        'node_state': {'readonly': True},
+        'run_id': {'readonly': True},
+    }
+
+    # Python attribute name -> REST wire name/type for msrest deserialization.
+    _attribute_map = {
+        'node_id': {'key': 'nodeId', 'type': 'str'},
+        'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'node_state': {'key': 'nodeState', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlComputeNodeInformation, self).__init__(**kwargs)
+        # All attributes start as None; the deserializer fills them in from
+        # the service response.
+        self.node_id = None
+        self.private_ip_address = None
+        self.public_ip_address = None
+        self.port = None
+        self.node_state = None
+        self.run_id = None
+
+class ComputeNodesInformation(msrest.serialization.Model):
+    """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AmlComputeNodesInformation.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :ivar next_link: The continuation token.
+    :vartype next_link: str
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    # Polymorphic deserialization: the wire value of computeType selects the
+    # concrete subclass.
+    _subtype_map = {
+        'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeNodesInformation, self).__init__(**kwargs)
+        # Abstract-style base: the discriminator is left unset here and is
+        # pinned by each concrete subclass.
+        self.compute_type = None  # type: Optional[str]
+        self.next_link = None
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+    """Compute node information related to a AmlCompute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :ivar next_link: The continuation token.
+    :vartype next_link: str
+    :ivar nodes: The collection of returned AmlCompute nodes details.
+    :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'next_link': {'readonly': True},
+        'nodes': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlComputeNodesInformation, self).__init__(**kwargs)
+        # Pin the discriminator after super() so the base cannot overwrite it.
+        self.compute_type = 'AmlCompute'  # type: str
+        # Readonly: populated only by deserialization of service responses.
+        self.nodes = None
+
+class AmlComputeProperties(msrest.serialization.Model):
+    """AML Compute properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+     "Linux".
+    :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+    :param vm_size: Virtual Machine Size.
+    :type vm_size: str
+    :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+     "LowPriority".
+    :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+    :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
+    :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+    :param isolated_network: Network is isolated or not.
+    :type isolated_network: bool
+    :param scale_settings: Scale settings for AML Compute.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+    :param user_account_credentials: Credentials for an administrator user account that will be
+     created on each compute node.
+    :type user_account_credentials:
+     ~azure_machine_learning_workspaces.models.UserAccountCredentials
+    :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+    :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+    :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+     Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+     Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+     Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+     else is open all public nodes. It can be default only during cluster creation time, after
+     creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+     "NotSpecified". Default value: "NotSpecified".
+    :type remote_login_port_public_access: str or
+     ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+    :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+     Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+     in the compute in progress. A compute enters this state when it is created and when no
+     operations are being performed on the compute to change the number of compute nodes. resizing -
+     Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+     from the compute. Possible values include: "Steady", "Resizing".
+    :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+    :ivar allocation_state_transition_time: The time at which the compute entered its current
+     allocation state.
+    :vartype allocation_state_transition_time: ~datetime.datetime
+    :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+    :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+    :vartype current_node_count: int
+    :ivar target_node_count: The target number of compute nodes for the compute. If the
+     allocationState is resizing, this property denotes the target node count for the ongoing resize
+     operation. If the allocationState is steady, this property denotes the target node count for
+     the previous resize operation.
+    :vartype target_node_count: int
+    :ivar node_state_counts: Counts of various node states on the compute.
+    :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+    :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+     values are: Possible values are: true - Indicates that the compute nodes will have public IPs
+     provisioned. false - Indicates that the compute nodes will have a private endpoint and no
+     public IPs.
+    :type enable_node_public_ip: bool
+    """
+
+    # Server-populated status fields are readonly and ignored on requests.
+    _validation = {
+        'allocation_state': {'readonly': True},
+        'allocation_state_transition_time': {'readonly': True},
+        'errors': {'readonly': True},
+        'current_node_count': {'readonly': True},
+        'target_node_count': {'readonly': True},
+        'node_state_counts': {'readonly': True},
+    }
+
+    # Python attribute name -> REST wire name/type for msrest (de)serialization.
+    _attribute_map = {
+        'os_type': {'key': 'osType', 'type': 'str'},
+        'vm_size': {'key': 'vmSize', 'type': 'str'},
+        'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+        'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+        'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+        'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+        'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+        'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+        'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+        'allocation_state': {'key': 'allocationState', 'type': 'str'},
+        'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+        'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+        'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+        'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+        'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+        'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlComputeProperties, self).__init__(**kwargs)
+        # Defaults "Linux" and "NotSpecified" match the documented swagger
+        # defaults above.
+        self.os_type = kwargs.get('os_type', "Linux")
+        self.vm_size = kwargs.get('vm_size', None)
+        self.vm_priority = kwargs.get('vm_priority', None)
+        self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
+        self.isolated_network = kwargs.get('isolated_network', None)
+        self.scale_settings = kwargs.get('scale_settings', None)
+        self.user_account_credentials = kwargs.get('user_account_credentials', None)
+        self.subnet = kwargs.get('subnet', None)
+        self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified")
+        # Readonly (server-populated) fields start as None.
+        self.allocation_state = None
+        self.allocation_state_transition_time = None
+        self.errors = None
+        self.current_node_count = None
+        self.target_node_count = None
+        self.node_state_counts = None
+        # NOTE(review): defaults to True (public IPs provisioned) although the
+        # docstring states no default -- matches generated output; confirm
+        # against the service swagger.
+        self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True)
+
+class IdentityConfiguration(msrest.serialization.Model):
+    """IdentityConfiguration.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AmlTokenConfiguration, ManagedIdentityConfiguration, ServicePrincipalConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+     server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+    """
+
+    _validation = {
+        'identity_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'identity_type': {'key': 'identityType', 'type': 'str'},
+    }
+
+    # Polymorphic deserialization: the wire value of identityType selects the
+    # concrete subclass.
+    _subtype_map = {
+        'identity_type': {'AMLToken': 'AmlTokenConfiguration', 'Managed': 'ManagedIdentityConfiguration', 'ServicePrincipal': 'ServicePrincipalConfiguration'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IdentityConfiguration, self).__init__(**kwargs)
+        # Abstract-style base: discriminator is pinned by each subclass.
+        self.identity_type = None  # type: Optional[str]
+
+class AmlTokenConfiguration(IdentityConfiguration):
+    """AmlTokenConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+     server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+    """
+
+    _validation = {
+        'identity_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'identity_type': {'key': 'identityType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlTokenConfiguration, self).__init__(**kwargs)
+        # Pin the discriminator after super() so the base cannot overwrite it.
+        self.identity_type = 'AMLToken'  # type: str
+
+class AmlUserFeature(msrest.serialization.Model):
+    """Features enabled for a workspace.
+
+    :param id: Specifies the feature ID.
+    :type id: str
+    :param display_name: Specifies the feature name.
+    :type display_name: str
+    :param description: Describes the feature for user experience.
+    :type description: str
+    """
+
+    # Python attribute name -> REST wire name/type for msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlUserFeature, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        self.display_name = kwargs.get('display_name', None)
+        self.description = kwargs.get('description', None)
+
+class AssetPath(msrest.serialization.Model):
+    """Details of an AssetUri.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param path: Required. The path of file/directory.
+    :type path: str
+    :param is_directory: Whether the path defines a directory or a single file.
+    :type is_directory: bool
+    """
+
+    # NOTE(review): the pattern only requires a word character and does not
+    # anchor or span the whole value -- presumably it mirrors the swagger
+    # definition; confirm the intended constraint.
+    _validation = {
+        'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    _attribute_map = {
+        'path': {'key': 'path', 'type': 'str'},
+        'is_directory': {'key': 'isDirectory', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AssetPath, self).__init__(**kwargs)
+        # Required argument: subscript access raises KeyError when omitted.
+        self.path = kwargs['path']
+        self.is_directory = kwargs.get('is_directory', None)
+
+class AssetReferenceBase(msrest.serialization.Model):
+    """AssetReferenceBase.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+     server. Possible values include: "Id", "DataPath", "OutputPath".
+    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+    """
+
+    _validation = {
+        'reference_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+    }
+
+    # Polymorphic deserialization: the wire value of referenceType selects the
+    # concrete subclass.
+    _subtype_map = {
+        'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AssetReferenceBase, self).__init__(**kwargs)
+        # Abstract-style base: discriminator is pinned by each subclass.
+        self.reference_type = None  # type: Optional[str]
+
class AssignedUser(msrest.serialization.Model):
    """A user that can be assigned to a compute instance.

    All required parameters must be populated in order to send to Azure.

    :param object_id: Required. User’s AAD Object Id.
    :type object_id: str
    :param tenant_id: Required. User’s AAD Tenant Id.
    :type tenant_id: str
    """

    # Both AAD identifiers must be supplied before sending to Azure.
    _validation = {
        'object_id': {'required': True},
        'tenant_id': {'required': True},
    }

    _attribute_map = {
        'object_id': {'key': 'objectId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AssignedUser, self).__init__(**kwargs)
        # Required keyword arguments; missing keys raise KeyError.
        self.object_id = kwargs['object_id']
        self.tenant_id = kwargs['tenant_id']
+
+
class AuthKeys(msrest.serialization.Model):
    """Primary and secondary authentication keys.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    # Wire-format mapping: attribute name -> JSON key and serialized type.
    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AuthKeys, self).__init__(**kwargs)
        # Both keys are optional and default to None when absent.
        self.primary_key = kwargs.get('primary_key')
        self.secondary_key = kwargs.get('secondary_key')
+
+
class JobBase(msrest.serialization.Model):
    """Job base definition.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: ComputeJobBase, LabelingJob.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    """

    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    # Polymorphic dispatch on the ``jobType`` discriminator.
    # NOTE(review): the key 'ComputeJobBase' is not one of the documented
    # JobType wire values ("Command", "Sweep", ...); it appears to route
    # through the intermediate ComputeJobBase class, whose own _subtype_map
    # resolves the concrete job types — confirm against the generator output.
    _subtype_map = {
        'job_type': {'ComputeJobBase': 'ComputeJobBase', 'Labeling': 'LabelingJob'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(JobBase, self).__init__(**kwargs)
        # Discriminator is set by concrete subclasses.
        self.job_type = None  # type: Optional[str]
        # Read-only, server-populated fields.
        self.provisioning_state = None
        self.interaction_endpoints = None
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
+
+
class ComputeJobBase(JobBase):
    """Compute job base definition.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AutoMlJob, CommandJob, PipelineJob, SweepJob.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
     placed in the "Default" experiment.
    :type experiment_name: str
    :param compute_binding: Required. Compute binding for the job.
    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
     Private preview is only for whitelisted customers.
    :type priority: int
    """

    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
        'compute_binding': {'required': True},
        'output': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
    }

    # Second-level polymorphic dispatch: resolves the concrete compute job
    # class from the ``jobType`` wire value.
    _subtype_map = {
        'job_type': {'AutoML': 'AutoMlJob', 'Command': 'CommandJob', 'Pipeline': 'PipelineJob', 'Sweep': 'SweepJob'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ComputeJobBase, self).__init__(**kwargs)
        # NOTE(review): 'ComputeJobBase' is not a documented JobType value;
        # concrete subclasses overwrite this with their own discriminator.
        self.job_type = 'ComputeJobBase'  # type: str
        self.experiment_name = kwargs.get('experiment_name', None)
        # ``compute_binding`` is required; a missing keyword raises KeyError.
        self.compute_binding = kwargs['compute_binding']
        # Read-only, server-populated field.
        self.output = None
        self.priority = kwargs.get('priority', None)
+
+
class AutoMlJob(ComputeJobBase):
    """AutoML Job definition.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
     placed in the "Default" experiment.
    :type experiment_name: str
    :param compute_binding: Required. Compute binding for the job.
    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
     Private preview is only for whitelisted customers.
    :type priority: int
    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled", "NotResponding", "Paused".
    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
    :param general_settings: General Settings.
    :type general_settings: ~azure_machine_learning_workspaces.models.GeneralSettings
    :param limit_settings: Limit Settings.
    :type limit_settings: ~azure_machine_learning_workspaces.models.ExperimentLimits
    :param data_settings: Collection of registered Tabular Dataset Ids required for training.
    :type data_settings: ~azure_machine_learning_workspaces.models.DataSettings
    :param featurization_settings: Featurization related configuration.
    :type featurization_settings: ~azure_machine_learning_workspaces.models.FeaturizationSettings
    :param forecasting_settings: Forecasting experiment specific configuration.
    :type forecasting_settings: ~azure_machine_learning_workspaces.models.ForecastingSettings
    :param training_settings: Advanced configuration settings for an AutoML Job.
    :type training_settings: ~azure_machine_learning_workspaces.models.TrainingSettings
    """

    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
        'compute_binding': {'required': True},
        'output': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
        'status': {'key': 'status', 'type': 'str'},
        'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
        'limit_settings': {'key': 'limitSettings', 'type': 'ExperimentLimits'},
        'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
        'featurization_settings': {'key': 'featurizationSettings', 'type': 'FeaturizationSettings'},
        'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
        'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AutoMlJob, self).__init__(**kwargs)
        # Concrete discriminator value for this job flavor.
        self.job_type = 'AutoML'  # type: str
        # Read-only, server-populated field.
        self.status = None
        # All AutoML settings sections are optional.
        self.general_settings = kwargs.get('general_settings', None)
        self.limit_settings = kwargs.get('limit_settings', None)
        self.data_settings = kwargs.get('data_settings', None)
        self.featurization_settings = kwargs.get('featurization_settings', None)
        self.forecasting_settings = kwargs.get('forecasting_settings', None)
        self.training_settings = kwargs.get('training_settings', None)
+
+
class AzureDataLakeSection(msrest.serialization.Model):
    """AzureDataLakeSection.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure Data Lake credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param store_name: Required. Azure Data Lake store name.
    :type store_name: str
    """

    # NOTE(review): the unanchored single-character pattern only constrains
    # the first character of ``store_name`` under msrest's re.match-based
    # validation — confirm the intended constraint.
    _validation = {
        'credentials': {'required': True},
        'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'store_name': {'key': 'storeName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzureDataLakeSection, self).__init__(**kwargs)
        # Required keyword arguments; missing keys raise KeyError.
        self.credentials = kwargs['credentials']
        self.store_name = kwargs['store_name']
+
+
class AzureMlComputeConfiguration(ComputeConfiguration):
    """AzureMlComputeConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
     "AKS", "AzureMLCompute".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
    """

    _validation = {
        'compute_type': {'required': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzureMlComputeConfiguration, self).__init__(**kwargs)
        # Fixed discriminator value identifying this compute configuration.
        self.compute_type = 'AzureMLCompute'  # type: str
+
+
class AzureMySqlSection(msrest.serialization.Model):
    """AzureMySqlSection.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure MySQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure MySQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure MySQL server port.
    :type port_number: int
    :param server_name: Required. Azure MySQL server name.
    :type server_name: str
    """

    # NOTE(review): the unanchored single-character patterns only constrain
    # the first character of each field under msrest's re.match-based
    # validation — confirm the intended constraint.
    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzureMySqlSection, self).__init__(**kwargs)
        # All fields are required; missing keywords raise KeyError.
        self.credentials = kwargs['credentials']
        self.database_name = kwargs['database_name']
        self.endpoint = kwargs['endpoint']
        self.port_number = kwargs['port_number']
        self.server_name = kwargs['server_name']
+
+
class AzurePostgreSqlSection(msrest.serialization.Model):
    """AzurePostgreSqlSection.

    All required parameters must be populated in order to send to Azure.

    :param enable_ssl: Whether the Azure PostgreSQL server requires SSL.
    :type enable_ssl: bool
    :param credentials: Required. Azure PostgreSQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure PostgreSQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure PostgreSQL server port.
    :type port_number: int
    :param server_name: Required. Azure PostgreSQL server name.
    :type server_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'enable_ssl': {'key': 'enableSSL', 'type': 'bool'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzurePostgreSqlSection, self).__init__(**kwargs)
        # ``enable_ssl`` is the only optional field.
        self.enable_ssl = kwargs.get('enable_ssl', None)
        # Remaining fields are required; missing keywords raise KeyError.
        self.credentials = kwargs['credentials']
        self.database_name = kwargs['database_name']
        self.endpoint = kwargs['endpoint']
        self.port_number = kwargs['port_number']
        self.server_name = kwargs['server_name']
+
+
class AzureSqlDatabaseSection(msrest.serialization.Model):
    """AzureSqlDatabaseSection.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure SQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzureSqlDatabaseSection, self).__init__(**kwargs)
        # All fields are required; missing keywords raise KeyError.
        self.credentials = kwargs['credentials']
        self.database_name = kwargs['database_name']
        self.endpoint = kwargs['endpoint']
        self.port_number = kwargs['port_number']
        self.server_name = kwargs['server_name']
+
+
class AzureStorageSection(msrest.serialization.Model):
    """AzureStorageSection.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Storage account name.
    :type account_name: str
    :param blob_cache_timeout: Blob storage cache timeout.
    :type blob_cache_timeout: int
    :param container_name: Required. Storage account container name.
    :type container_name: str
    :param credentials: Required. Storage account credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param endpoint: Required. Azure cloud endpoint for the storage account.
    :type endpoint: str
    :param protocol: Required. Protocol used to communicate with the storage account.
    :type protocol: str
    """

    _validation = {
        'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'credentials': {'required': True},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'blob_cache_timeout': {'key': 'blobCacheTimeout', 'type': 'int'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AzureStorageSection, self).__init__(**kwargs)
        # ``blob_cache_timeout`` is the only optional field; the rest are
        # required and raise KeyError when missing.
        self.account_name = kwargs['account_name']
        self.blob_cache_timeout = kwargs.get('blob_cache_timeout', None)
        self.container_name = kwargs['container_name']
        self.credentials = kwargs['credentials']
        self.endpoint = kwargs['endpoint']
        self.protocol = kwargs['protocol']
+
+
class EarlyTerminationPolicyConfiguration(msrest.serialization.Model):
    """Early termination policies enable canceling poor-performing runs before they complete.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: BanditPolicyConfiguration, MedianStoppingPolicyConfiguration, TruncationSelectionPolicyConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
     values include: "Bandit", "MedianStopping", "TruncationSelection".
    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
    :param evaluation_interval: Interval (number of runs) between policy evaluations.
    :type evaluation_interval: int
    :param delay_evaluation: Number of intervals by which to delay the first policy evaluation.
    :type delay_evaluation: int
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'policy_type': {'key': 'policyType', 'type': 'str'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
    }

    # Polymorphic dispatch: resolves the concrete policy class from the
    # ``policyType`` wire value during deserialization.
    _subtype_map = {
        'policy_type': {'Bandit': 'BanditPolicyConfiguration', 'MedianStopping': 'MedianStoppingPolicyConfiguration', 'TruncationSelection': 'TruncationSelectionPolicyConfiguration'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(EarlyTerminationPolicyConfiguration, self).__init__(**kwargs)
        # Abstract base: the discriminator value is filled in by subclasses.
        self.policy_type = None  # type: Optional[str]
        self.evaluation_interval = kwargs.get('evaluation_interval', None)
        self.delay_evaluation = kwargs.get('delay_evaluation', None)
+
+
+class BanditPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+ """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param evaluation_interval:
+ :type evaluation_interval: int
+ :param delay_evaluation:
+ :type delay_evaluation: int
+ :param slack_factor:
+ :type slack_factor: float
+ :param slack_amount:
+ :type slack_amount: float
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'slack_factor': {'key': 'slackFactor', 'type': 'float'},
+ 'slack_amount': {'key': 'slackAmount', 'type': 'float'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(BanditPolicyConfiguration, self).__init__(**kwargs)
+ self.policy_type = 'Bandit' # type: str
+ self.slack_factor = kwargs.get('slack_factor', None)
+ self.slack_amount = kwargs.get('slack_amount', None)
+
+
class CertificateSection(msrest.serialization.Model):
    """Service-principal-with-certificate authentication details.

    All required parameters must be populated in order to send to Azure.

    :param authority_url: Authority URL used for authentication.
    :type authority_url: str
    :param resource_uri: Resource the service principal has access to.
    :type resource_uri: str
    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
    :type tenant_id: str
    :param client_id: Required. Service principal client ID.
    :type client_id: str
    :param certificate: Service principal certificate.
    :type certificate: str
    :param thumbprint: Required. Thumbprint of the certificate used for authentication.
    :type thumbprint: str
    """

    _validation = {
        'tenant_id': {'required': True},
        'client_id': {'required': True},
        'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'certificate': {'key': 'certificate', 'type': 'str'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(CertificateSection, self).__init__(**kwargs)
        # Optional fields default to None; required fields raise KeyError
        # when the keyword is missing.
        self.authority_url = kwargs.get('authority_url', None)
        self.resource_uri = kwargs.get('resource_uri', None)
        self.tenant_id = kwargs['tenant_id']
        self.client_id = kwargs['client_id']
        self.certificate = kwargs.get('certificate', None)
        self.thumbprint = kwargs['thumbprint']
+
+
class ClusterUpdateParameters(msrest.serialization.Model):
    """Parameters for updating an AmlCompute cluster.

    :param scale_settings: Desired scale settings for the amlCompute.
    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
    """

    # The scale settings live under the resource's ``properties`` node on
    # the wire, hence the dotted JSON key.
    _attribute_map = {
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ClusterUpdateParameters, self).__init__(**kwargs)
        # Optional; defaults to None when not supplied.
        self.scale_settings = kwargs.get('scale_settings')
+
+
class ExportSummary(msrest.serialization.Model):
    """ExportSummary.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    """

    _validation = {
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'start_time_utc': {'readonly': True},
        'end_time_utc': {'readonly': True},
    }

    _attribute_map = {
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
    }

    # Polymorphic dispatch: resolves the concrete summary class from the
    # ``format`` wire value during deserialization.
    _subtype_map = {
        'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ExportSummary, self).__init__(**kwargs)
        # Abstract base: the discriminator value is filled in by subclasses.
        self.format = None  # type: Optional[str]
        # Read-only, server-populated fields.
        self.labeling_job_id = None
        self.exported_row_count = None
        self.start_time_utc = None
        self.end_time_utc = None
+
+
class CocoExportSummary(ExportSummary):
    """CocoExportSummary.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar snapshot_path: The output path where the labels will be exported.
    :vartype snapshot_path: str
    :ivar container_name: The container name to which the labels will be exported.
    :vartype container_name: str
    """

    _validation = {
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'start_time_utc': {'readonly': True},
        'end_time_utc': {'readonly': True},
        'snapshot_path': {'readonly': True},
        'container_name': {'readonly': True},
    }

    _attribute_map = {
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(CocoExportSummary, self).__init__(**kwargs)
        # Concrete discriminator value for this export format.
        self.format = 'Coco'  # type: str
        # Read-only, server-populated fields.
        self.snapshot_path = None
        self.container_name = None
+
+
class CodeConfiguration(msrest.serialization.Model):
    """CodeConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param code_artifact_id: The ID of the code asset.
    :type code_artifact_id: str
    :param command: Required. The command to execute on startup of the job. eg. ["python",
     "train.py"].
    :type command: str
    """

    # NOTE(review): ``command`` is serialized as a plain 'str' although the
    # docstring example shows a list — confirm against the service swagger.
    _validation = {
        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'code_artifact_id': {'key': 'codeArtifactId', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(CodeConfiguration, self).__init__(**kwargs)
        self.code_artifact_id = kwargs.get('code_artifact_id', None)
        # ``command`` is required; a missing keyword raises KeyError.
        self.command = kwargs['command']
+
+
class CodeContainerResource(msrest.serialization.Model):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :param properties: Dictionary of string key/value pairs.
    :type properties: dict[str, str]
    :param tags: A set of tags. Dictionary of string key/value pairs.
    :type tags: dict[str, str]
    :param description: The asset description text.
    :type description: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
    }

    # Dotted keys flatten the nested ARM ``properties`` node on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
        'tags': {'key': 'properties.tags', 'type': '{str}'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(CodeContainerResource, self).__init__(**kwargs)
        # Read-only, server-populated ARM envelope fields.
        self.id = None
        self.name = None
        self.type = None
        self.system_data = None
        # Writable payload fields; all optional.
        self.properties = kwargs.get('properties', None)
        self.tags = kwargs.get('tags', None)
        self.description = kwargs.get('description', None)
+
+
+class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of CodeContainer entities.
+
+ :param value: An array of objects of type CodeContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.CodeContainerResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[CodeContainerResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class CodeVersionResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param datastore_id: The asset datastoreId.
+ :type datastore_id: str
+ :param asset_path: DEPRECATED - use
+ Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
+ :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
+ :param path: The path of the file/directory.
+ :type path: str
+ :param generated_by: If the name version are system generated (anonymous registration) or user
+ generated. Possible values include: "User", "System".
+ :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
+ 'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
+ 'path': {'key': 'properties.path', 'type': 'str'},
+ 'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CodeVersionResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.datastore_id = kwargs.get('datastore_id', None)
+ self.asset_path = kwargs.get('asset_path', None)
+ self.path = kwargs.get('path', None)
+ self.generated_by = kwargs.get('generated_by', None)
+ self.description = kwargs.get('description', None)
+ self.tags = kwargs.get('tags', None)
+ self.properties = kwargs.get('properties', None)
+
+
+class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of CodeVersion entities.
+
+ :param value: An array of objects of type CodeVersion.
+ :type value: list[~azure_machine_learning_workspaces.models.CodeVersionResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[CodeVersionResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class CommandJob(ComputeJobBase):
+ """Code Job definition.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+ values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+ :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+ :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+ "InProgress".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.JobProvisioningState
+ :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+ For local jobs, a job endpoint will have a value of FileStreamObject.
+ :vartype interaction_endpoints:
+ ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+ placed in the "Default" experiment.
+ :type experiment_name: str
+ :param compute_binding: Required. Compute binding for the job.
+ :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :ivar output: Location of the job output logs and artifacts.
+ :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+ :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+ Private preview is only for whitelisted customers.
+ :type priority: int
+ :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+ "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+ "Failed", "Canceled", "NotResponding", "Paused".
+ :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+ :param max_run_duration_seconds: The max run duration in seconds, after which the job will be
+ cancelled.
+ :type max_run_duration_seconds: long
+ :param code_configuration: Required. Code configuration of the job.
+ :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+ :param environment_id: Environment specification of the job.
+ :type environment_id: str
+ :param data_bindings: Mapping of data bindings used in the job.
+ :type data_bindings: dict[str, ~azure_machine_learning_workspaces.models.DataBinding]
+ :param distribution_configuration:
+ :type distribution_configuration:
+ ~azure_machine_learning_workspaces.models.DistributionConfiguration
+ :param environment_variables: Environment variables included in the job.
+ :type environment_variables: dict[str, str]
+ :param identity_configuration:
+ :type identity_configuration: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+ :ivar parameters: Input parameters.
+ :vartype parameters: dict[str, object]
+ """
+
+ _validation = {
+ 'job_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'interaction_endpoints': {'readonly': True},
+ 'compute_binding': {'required': True},
+ 'output': {'readonly': True},
+ 'status': {'readonly': True},
+ 'code_configuration': {'required': True},
+ 'parameters': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'job_type': {'key': 'jobType', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+ 'output': {'key': 'output', 'type': 'JobOutput'},
+ 'priority': {'key': 'priority', 'type': 'int'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
+ 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+ 'environment_id': {'key': 'environmentId', 'type': 'str'},
+ 'data_bindings': {'key': 'dataBindings', 'type': '{DataBinding}'},
+ 'distribution_configuration': {'key': 'distributionConfiguration', 'type': 'DistributionConfiguration'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'identity_configuration': {'key': 'identityConfiguration', 'type': 'IdentityConfiguration'},
+ 'parameters': {'key': 'parameters', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CommandJob, self).__init__(**kwargs)
+ self.job_type = 'Command' # type: str
+ self.status = None
+ self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
+ self.code_configuration = kwargs['code_configuration']
+ self.environment_id = kwargs.get('environment_id', None)
+ self.data_bindings = kwargs.get('data_bindings', None)
+ self.distribution_configuration = kwargs.get('distribution_configuration', None)
+ self.environment_variables = kwargs.get('environment_variables', None)
+ self.identity_configuration = kwargs.get('identity_configuration', None)
+ self.parameters = None
+
+
+class Component(msrest.serialization.Model):
+ """Component.
+
+ :param component_type: Component Type, should match the schema. Possible values include:
+ "CommandComponent".
+ :type component_type: str or ~azure_machine_learning_workspaces.models.ComponentType
+ :param display_name: DisplayName of the component on the UI. Defaults to same as name.
+ :type display_name: str
+ :param is_deterministic: Whether or not its deterministic. Defaults to true.
+ :type is_deterministic: bool
+ :param inputs: Defines input ports of the component. The string key is the name of input, which
+ should be a valid Python variable name.
+ :type inputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentInput]
+ :param outputs: Defines output ports of the component. The string key is the name of Output,
+ which should be a valid Python variable name.
+ :type outputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentOutput]
+ """
+
+ _attribute_map = {
+ 'component_type': {'key': 'componentType', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
+ 'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
+ 'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Component, self).__init__(**kwargs)
+ self.component_type = kwargs.get('component_type', None)
+ self.display_name = kwargs.get('display_name', None)
+ self.is_deterministic = kwargs.get('is_deterministic', None)
+ self.inputs = kwargs.get('inputs', None)
+ self.outputs = kwargs.get('outputs', None)
+
+
+class ComponentContainerResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentContainerResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.description = kwargs.get('description', None)
+ self.tags = kwargs.get('tags', None)
+ self.properties = kwargs.get('properties', None)
+
+
+class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ComponentContainer entities.
+
+ :param value: An array of objects of type ComponentContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.ComponentContainerResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComponentContainerResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class ComponentInput(msrest.serialization.Model):
+ """ComponentInput.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ComponentInputEnum, ComponentInputGeneric, ComponentInputRangedNumber.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+ Possible values include: "Generic", "RangedNumber", "Enum".
+ :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+ :param optional: If the input is optional. Defaults to false/required.
+ :type optional: bool
+ :param description: Description for input.
+ :type description: str
+ :param default: Default value for an input. Must match the given type.
+ :type default: str
+ :param data_type: Required. Component input type. String is used for type extensibility.
+ :type data_type: str
+ """
+
+ _validation = {
+ 'component_input_type': {'required': True},
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+ 'optional': {'key': 'optional', 'type': 'bool'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'default': {'key': 'default', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'component_input_type': {'Enum': 'ComponentInputEnum', 'Generic': 'ComponentInputGeneric', 'RangedNumber': 'ComponentInputRangedNumber'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentInput, self).__init__(**kwargs)
+ self.component_input_type = None # type: Optional[str]
+ self.optional = kwargs.get('optional', None)
+ self.description = kwargs.get('description', None)
+ self.default = kwargs.get('default', None)
+ self.data_type = kwargs['data_type']
+
+
+class ComponentInputEnum(ComponentInput):
+ """ComponentInputEnum.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+ Possible values include: "Generic", "RangedNumber", "Enum".
+ :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+ :param optional: If the input is optional. Defaults to false/required.
+ :type optional: bool
+ :param description: Description for input.
+ :type description: str
+ :param default: Default value for an input. Must match the given type.
+ :type default: str
+ :param data_type: Required. Component input type. String is used for type extensibility.
+ :type data_type: str
+ :param enum: The enum definition list for enum types, used to validate the inputs for type
+ enum.
+ :type enum: list[str]
+ """
+
+ _validation = {
+ 'component_input_type': {'required': True},
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+ 'optional': {'key': 'optional', 'type': 'bool'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'default': {'key': 'default', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ 'enum': {'key': 'enum', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentInputEnum, self).__init__(**kwargs)
+ self.component_input_type = 'Enum' # type: str
+ self.enum = kwargs.get('enum', None)
+
+
+class ComponentInputGeneric(ComponentInput):
+ """ComponentInputGeneric.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+ Possible values include: "Generic", "RangedNumber", "Enum".
+ :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+ :param optional: If the input is optional. Defaults to false/required.
+ :type optional: bool
+ :param description: Description for input.
+ :type description: str
+ :param default: Default value for an input. Must match the given type.
+ :type default: str
+ :param data_type: Required. Component input type. String is used for type extensibility.
+ :type data_type: str
+ """
+
+ _validation = {
+ 'component_input_type': {'required': True},
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+ 'optional': {'key': 'optional', 'type': 'bool'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'default': {'key': 'default', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentInputGeneric, self).__init__(**kwargs)
+ self.component_input_type = 'Generic' # type: str
+
+
+class ComponentInputRangedNumber(ComponentInput):
+ """ComponentInputRangedNumber.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+ Possible values include: "Generic", "RangedNumber", "Enum".
+ :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+ :param optional: If the input is optional. Defaults to false/required.
+ :type optional: bool
+ :param description: Description for input.
+ :type description: str
+ :param default: Default value for an input. Must match the given type.
+ :type default: str
+ :param data_type: Required. Component input type. String is used for type extensibility.
+ :type data_type: str
+ :param min: The minimum value that can be accepted, used to validate the inputs for type
+ float/int.
+ :type min: str
+ :param max: The maximum value that can be accepted, used to validate the inputs for type
+ float/int.
+ :type max: str
+ """
+
+ _validation = {
+ 'component_input_type': {'required': True},
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+ 'optional': {'key': 'optional', 'type': 'bool'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'default': {'key': 'default', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ 'min': {'key': 'min', 'type': 'str'},
+ 'max': {'key': 'max', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentInputRangedNumber, self).__init__(**kwargs)
+ self.component_input_type = 'RangedNumber' # type: str
+ self.min = kwargs.get('min', None)
+ self.max = kwargs.get('max', None)
+
+
+class ComponentJob(msrest.serialization.Model):
+ """ComponentJob.
+
+ :param compute_binding: Compute definition for job.
+ :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param component_id: Reference to component artifact.
+ :type component_id: str
+ :param inputs: Data input set for job.
+ :type inputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJobInput]
+ :param outputs: Data output set for job.
+ :type outputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJobOutput]
+ """
+
+ _attribute_map = {
+ 'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+ 'component_id': {'key': 'componentId', 'type': 'str'},
+ 'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
+ 'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentJob, self).__init__(**kwargs)
+ self.compute_binding = kwargs.get('compute_binding', None)
+ self.component_id = kwargs.get('component_id', None)
+ self.inputs = kwargs.get('inputs', None)
+ self.outputs = kwargs.get('outputs', None)
+
+
+class ComponentJobInput(msrest.serialization.Model):
+ """ComponentJobInput.
+
+ :param data: Input data definition.
+ :type data: ~azure_machine_learning_workspaces.models.InputData
+ :param input_binding: Reference to an output of another job's ComponentJobInput or reference to
+ a ComponentJobInput. Example "input2".
+ :type input_binding: str
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'InputData'},
+ 'input_binding': {'key': 'inputBinding', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentJobInput, self).__init__(**kwargs)
+ self.data = kwargs.get('data', None)
+ self.input_binding = kwargs.get('input_binding', None)
+
+
+class ComponentJobOutput(msrest.serialization.Model):
+ """ComponentJobOutput.
+
+ :param data: Output data definition.
+ :type data: ~azure_machine_learning_workspaces.models.OutputData
+ :param output_binding: This is to pull the ComponentJobOutput from the overall PipelineOutputs.
+ Example "outputPath".
+ :type output_binding: str
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'OutputData'},
+ 'output_binding': {'key': 'outputBinding', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentJobOutput, self).__init__(**kwargs)
+ self.data = kwargs.get('data', None)
+ self.output_binding = kwargs.get('output_binding', None)
+
+
+class ComponentOutput(msrest.serialization.Model):
+ """ComponentOutput.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: Description for output.
+ :type description: str
+ :param data_type: Required. Component output type. String is used for type extensibility.
+ :type data_type: str
+ """
+
+ _validation = {
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentOutput, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.data_type = kwargs['data_type']
+
+
+class ComponentVersionResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param environment_id: Environment configuration of the component.
+ :type environment_id: str
+ :param code_configuration: Required. Code configuration of the job. Includes CodeArtifactId and
+ Command.
+ :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+ :param component: Component definition details.
+ :type component: ~azure_machine_learning_workspaces.models.Component
+ :param generated_by: If the name version are system generated (anonymous registration) or user
+ generated. Possible values include: "User", "System".
+ :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'code_configuration': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'environment_id': {'key': 'properties.environmentId', 'type': 'str'},
+ 'code_configuration': {'key': 'properties.codeConfiguration', 'type': 'CodeConfiguration'},
+ 'component': {'key': 'properties.component', 'type': 'Component'},
+ 'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentVersionResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.environment_id = kwargs.get('environment_id', None)
+ self.code_configuration = kwargs['code_configuration']
+ self.component = kwargs.get('component', None)
+ self.generated_by = kwargs.get('generated_by', None)
+ self.description = kwargs.get('description', None)
+ self.tags = kwargs.get('tags', None)
+ self.properties = kwargs.get('properties', None)
+
+
+class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ComponentVersion entities.
+
+ :param value: An array of objects of type ComponentVersion.
+ :type value: list[~azure_machine_learning_workspaces.models.ComponentVersionResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComponentVersionResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class ComputeBinding(msrest.serialization.Model):
+ """Compute binding definition.
+
+ :param compute_id: Resource ID of the compute resource.
+ :type compute_id: str
+ :param node_count: Number of nodes.
+ :type node_count: int
+ :param is_local: Set to true for jobs running on local compute.
+ :type is_local: bool
+ """
+
+ _attribute_map = {
+ 'compute_id': {'key': 'computeId', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'is_local': {'key': 'isLocal', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeBinding, self).__init__(**kwargs)
+ self.compute_id = kwargs.get('compute_id', None)
+ self.node_count = kwargs.get('node_count', None)
+ self.is_local = kwargs.get('is_local', None)
+
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(**kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application' endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = kwargs.get('display_name', None)
+ self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = kwargs.get('operation_name', None)
+ self.operation_time = kwargs.get('operation_time', None)
+ self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = kwargs.get('vm_size', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
+ self.ssh_settings = kwargs.get('ssh_settings', None)
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
+ self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
+ self.setup_scripts = kwargs.get('setup_scripts', None)
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled")
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = kwargs.get('admin_public_key', None)
+
+
+class Resource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.type = None
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class ContainerRegistry(msrest.serialization.Model):
+ """ContainerRegistry.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerRegistry, self).__init__(**kwargs)
+ self.address = kwargs.get('address', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+
+
+class ContainerRegistryResponse(msrest.serialization.Model):
+ """ContainerRegistryResponse.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerRegistryResponse, self).__init__(**kwargs)
+ self.address = kwargs.get('address', None)
+
+
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The number of CPU cores on the container.
+ :type cpu: float
+ :param memory_in_gb: The amount of memory on the container in GB.
+ :type memory_in_gb: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = kwargs.get('cpu', None)
+ self.memory_in_gb = kwargs.get('memory_in_gb', None)
+ self.gpu = kwargs.get('gpu', None)
+ self.fpga = kwargs.get('fpga', None)
+
+
+class EnvironmentImageRequest(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageRequest, self).__init__(**kwargs)
+ self.driver_program = kwargs.get('driver_program', None)
+ self.assets = kwargs.get('assets', None)
+ self.model_ids = kwargs.get('model_ids', None)
+ self.models = kwargs.get('models', None)
+ self.environment = kwargs.get('environment', None)
+ self.environment_reference = kwargs.get('environment_reference', None)
+
+
+class CreateServiceRequestEnvironmentImageRequest(EnvironmentImageRequest):
+ """The Environment, models and assets needed for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequestEnvironmentImageRequest, self).__init__(**kwargs)
+
+
+class CreateServiceRequestKeys(AuthKeys):
+ """The authentication keys.
+
+ :param primary_key: The primary key.
+ :type primary_key: str
+ :param secondary_key: The secondary key.
+ :type secondary_key: str
+ """
+
+ _attribute_map = {
+ 'primary_key': {'key': 'primaryKey', 'type': 'str'},
+ 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequestKeys, self).__init__(**kwargs)
+
+
+class CsvExportSummary(ExportSummary):
+ """CsvExportSummary.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param format: Required. The format of exported labels, also as the discriminator.Constant
+ filled by server. Possible values include: "Dataset", "Coco", "CSV".
+ :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+ :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+ :vartype labeling_job_id: str
+ :ivar exported_row_count: The total number of labeled datapoints exported.
+ :vartype exported_row_count: long
+ :ivar start_time_utc: The time when the export was requested.
+ :vartype start_time_utc: ~datetime.datetime
+ :ivar end_time_utc: The time when the export was completed.
+ :vartype end_time_utc: ~datetime.datetime
+ :ivar snapshot_path: The output path where the labels will be exported.
+ :vartype snapshot_path: str
+ :ivar container_name: The container name to which the labels will be exported.
+ :vartype container_name: str
+ """
+
+ _validation = {
+ 'format': {'required': True},
+ 'labeling_job_id': {'readonly': True},
+ 'exported_row_count': {'readonly': True},
+ 'start_time_utc': {'readonly': True},
+ 'end_time_utc': {'readonly': True},
+ 'snapshot_path': {'readonly': True},
+ 'container_name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'format': {'key': 'format', 'type': 'str'},
+ 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+ 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+ 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+ 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+ 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CsvExportSummary, self).__init__(**kwargs)
+ self.format = 'CSV' # type: str
+ self.snapshot_path = None
+ self.container_name = None
+
+
+class DataBinding(msrest.serialization.Model):
+ """Data binding definition.
+
+ :param source_data_reference: Reference to source data artifact.
+ :type source_data_reference: str
+ :param local_reference: Location of data inside the container process.
+ :type local_reference: str
+ :param mode: Mechanism for accessing the data artifact. Possible values include: "Mount",
+ "Download", "Upload".
+ :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
+ """
+
+ _attribute_map = {
+ 'source_data_reference': {'key': 'sourceDataReference', 'type': 'str'},
+ 'local_reference': {'key': 'localReference', 'type': 'str'},
+ 'mode': {'key': 'mode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataBinding, self).__init__(**kwargs)
+ self.source_data_reference = kwargs.get('source_data_reference', None)
+ self.local_reference = kwargs.get('local_reference', None)
+ self.mode = kwargs.get('mode', None)
+
+
+class Databricks(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: access token for databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DataContainerResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param properties: Dictionary of :code:``.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Dictionary of :code:``.
+ :type tags: dict[str, str]
+ :param description:
+ :type description: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataContainerResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+ self.description = kwargs.get('description', None)
+
+
+class DataContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of DataContainer entities.
+
+ :param value: An array of objects of type DataContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.DataContainerResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[DataContainerResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(**kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(**kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+    """DataLakeAnalyticsProperties.
+
+    :param data_lake_store_account_name: DataLake Store Account Name.
+    :type data_lake_store_account_name: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+        self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None)
+
+
+class DataPathAssetReference(AssetReferenceBase):
+    """DataPathAssetReference.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+     server. Possible values include: "Id", "DataPath", "OutputPath".
+    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+    :param path: The path of the referenced asset within the datastore.
+    :type path: str
+    :param datastore_id: ARM id of the datastore that holds the asset.
+    :type datastore_id: str
+    """
+
+    _validation = {
+        'reference_type': {'required': True},
+    }
+
+    # Maps Python attribute names to REST payload keys and msrest wire types.
+    _attribute_map = {
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DataPathAssetReference, self).__init__(**kwargs)
+        # Fixed discriminator value selecting this AssetReferenceBase subtype.
+        self.reference_type = 'DataPath'  # type: str
+        self.path = kwargs.get('path', None)
+        self.datastore_id = kwargs.get('datastore_id', None)
+
+
+class DatasetExportSummary(ExportSummary):
+    """DatasetExportSummary.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param format: Required. The format of exported labels, also as the discriminator.Constant
+     filled by server. Possible values include: "Dataset", "Coco", "CSV".
+    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+    :vartype labeling_job_id: str
+    :ivar exported_row_count: The total number of labeled datapoints exported.
+    :vartype exported_row_count: long
+    :ivar start_time_utc: The time when the export was requested.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The time when the export was completed.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar labeled_asset_name: The unique name of the labeled data asset.
+    :vartype labeled_asset_name: str
+    """
+
+    # All fields other than the discriminator are server-populated (readonly).
+    _validation = {
+        'format': {'required': True},
+        'labeling_job_id': {'readonly': True},
+        'exported_row_count': {'readonly': True},
+        'start_time_utc': {'readonly': True},
+        'end_time_utc': {'readonly': True},
+        'labeled_asset_name': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'format': {'key': 'format', 'type': 'str'},
+        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatasetExportSummary, self).__init__(**kwargs)
+        # Fixed discriminator value selecting this ExportSummary subtype.
+        self.format = 'Dataset'  # type: str
+        # Readonly; filled in from the server response during deserialization.
+        self.labeled_asset_name = None
+
+
+class DatasetReference(msrest.serialization.Model):
+    """The dataset reference object.
+
+    :param name: The name of the dataset reference.
+    :type name: str
+    :param id: The id of the dataset reference.
+    :type id: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatasetReference, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.id = kwargs.get('id', None)
+
+
+class DataSettings(msrest.serialization.Model):
+    """This class represents the Dataset Json that is passed into Jasmine for training.
+
+    :param training_data: The training_data.
+    :type training_data: ~azure_machine_learning_workspaces.models.TrainingDataSettings
+    :param validation_data: The validation_data.
+    :type validation_data: ~azure_machine_learning_workspaces.models.ValidationDataSettings
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'training_data': {'key': 'trainingData', 'type': 'TrainingDataSettings'},
+        'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DataSettings, self).__init__(**kwargs)
+        self.training_data = kwargs.get('training_data', None)
+        self.validation_data = kwargs.get('validation_data', None)
+
+
+class DatastoreContents(msrest.serialization.Model):
+    """DatastoreContents.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param datastore_contents_type: Required. Storage type backing the datastore. Possible values
+     include: "AzureBlob", "AzureDataLake", "AzureDataLakeGen2", "AzureFile", "AzureMySql",
+     "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+    :type datastore_contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+    :param azure_data_lake: Azure Data Lake (Gen1/2) storage information.
+    :type azure_data_lake: ~azure_machine_learning_workspaces.models.AzureDataLakeSection
+    :param azure_my_sql: Azure Database for MySQL information.
+    :type azure_my_sql: ~azure_machine_learning_workspaces.models.AzureMySqlSection
+    :param azure_postgre_sql: Azure Database for PostgreSQL information.
+    :type azure_postgre_sql: ~azure_machine_learning_workspaces.models.AzurePostgreSqlSection
+    :param azure_sql_database: Azure SQL Database information.
+    :type azure_sql_database: ~azure_machine_learning_workspaces.models.AzureSqlDatabaseSection
+    :param azure_storage: Azure storage account (blobs, files) information.
+    :type azure_storage: ~azure_machine_learning_workspaces.models.AzureStorageSection
+    :param gluster_fs: GlusterFS volume information.
+    :type gluster_fs: ~azure_machine_learning_workspaces.models.GlusterFsSection
+    """
+
+    _validation = {
+        'datastore_contents_type': {'required': True},
+    }
+
+    # Maps Python attribute names to REST payload keys and msrest wire types.
+    _attribute_map = {
+        'datastore_contents_type': {'key': 'datastoreContentsType', 'type': 'str'},
+        'azure_data_lake': {'key': 'azureDataLake', 'type': 'AzureDataLakeSection'},
+        'azure_my_sql': {'key': 'azureMySql', 'type': 'AzureMySqlSection'},
+        'azure_postgre_sql': {'key': 'azurePostgreSql', 'type': 'AzurePostgreSqlSection'},
+        'azure_sql_database': {'key': 'azureSqlDatabase', 'type': 'AzureSqlDatabaseSection'},
+        'azure_storage': {'key': 'azureStorage', 'type': 'AzureStorageSection'},
+        'gluster_fs': {'key': 'glusterFs', 'type': 'GlusterFsSection'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatastoreContents, self).__init__(**kwargs)
+        # Required keyword argument: raises KeyError if omitted by the caller.
+        self.datastore_contents_type = kwargs['datastore_contents_type']
+        self.azure_data_lake = kwargs.get('azure_data_lake', None)
+        self.azure_my_sql = kwargs.get('azure_my_sql', None)
+        self.azure_postgre_sql = kwargs.get('azure_postgre_sql', None)
+        self.azure_sql_database = kwargs.get('azure_sql_database', None)
+        self.azure_storage = kwargs.get('azure_storage', None)
+        self.gluster_fs = kwargs.get('gluster_fs', None)
+
+
+class DatastoreCredentials(msrest.serialization.Model):
+    """DatastoreCredentials.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param datastore_credentials_type: Required. Credential type used to authentication with
+     storage. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+     "ServicePrincipal", "SqlAdmin".
+    :type datastore_credentials_type: str or
+     ~azure_machine_learning_workspaces.models.CredentialsType
+    :param account_key: Storage account key authentication.
+    :type account_key: ~azure_machine_learning_workspaces.models.AccountKeySection
+    :param certificate: Service principal certificate authentication.
+    :type certificate: ~azure_machine_learning_workspaces.models.CertificateSection
+    :param sas: Storage container SAS token authentication.
+    :type sas: ~azure_machine_learning_workspaces.models.SasSection
+    :param service_principal: Service principal password authentication.
+    :type service_principal: ~azure_machine_learning_workspaces.models.ServicePrincipalSection
+    :param sql_admin: SQL user/password authentication.
+    :type sql_admin: ~azure_machine_learning_workspaces.models.SqlAdminSection
+    """
+
+    _validation = {
+        'datastore_credentials_type': {'required': True},
+    }
+
+    # Maps Python attribute names to REST payload keys and msrest wire types.
+    _attribute_map = {
+        'datastore_credentials_type': {'key': 'datastoreCredentialsType', 'type': 'str'},
+        'account_key': {'key': 'accountKey', 'type': 'AccountKeySection'},
+        'certificate': {'key': 'certificate', 'type': 'CertificateSection'},
+        'sas': {'key': 'sas', 'type': 'SasSection'},
+        'service_principal': {'key': 'servicePrincipal', 'type': 'ServicePrincipalSection'},
+        'sql_admin': {'key': 'sqlAdmin', 'type': 'SqlAdminSection'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatastoreCredentials, self).__init__(**kwargs)
+        # Required keyword argument: raises KeyError if omitted by the caller.
+        self.datastore_credentials_type = kwargs['datastore_credentials_type']
+        self.account_key = kwargs.get('account_key', None)
+        self.certificate = kwargs.get('certificate', None)
+        self.sas = kwargs.get('sas', None)
+        self.service_principal = kwargs.get('service_principal', None)
+        self.sql_admin = kwargs.get('sql_admin', None)
+
+
+class DatastorePropertiesResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param contents: Required. Reference to the datastore storage contents.
+    :type contents: ~azure_machine_learning_workspaces.models.DatastoreContents
+    :ivar has_been_validated: Whether the service has validated access to the datastore with the
+     provided credentials.
+    :vartype has_been_validated: bool
+    :param is_default: Whether this datastore is the default for the workspace.
+    :type is_default: bool
+    :param linked_info: Information about the datastore origin, if linked.
+    :type linked_info: ~azure_machine_learning_workspaces.models.LinkedInfo
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    # readonly fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'contents': {'required': True},
+        'has_been_validated': {'readonly': True},
+    }
+
+    # Dotted keys (e.g. 'properties.contents') flatten nested JSON on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'contents': {'key': 'properties.contents', 'type': 'DatastoreContents'},
+        'has_been_validated': {'key': 'properties.hasBeenValidated', 'type': 'bool'},
+        'is_default': {'key': 'properties.isDefault', 'type': 'bool'},
+        'linked_info': {'key': 'properties.linkedInfo', 'type': 'LinkedInfo'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatastorePropertiesResource, self).__init__(**kwargs)
+        # Readonly fields initialized to None; filled in from server responses.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        # Required keyword argument: raises KeyError if omitted by the caller.
+        self.contents = kwargs['contents']
+        self.has_been_validated = None
+        self.is_default = kwargs.get('is_default', None)
+        self.linked_info = kwargs.get('linked_info', None)
+        self.properties = kwargs.get('properties', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+
+
+class DatastorePropertiesResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of DatastoreProperties entities.
+
+    :param value: An array of objects of type DatastoreProperties.
+    :type value: list[~azure_machine_learning_workspaces.models.DatastorePropertiesResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DatastorePropertiesResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DatastorePropertiesResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class DataVersionResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param dataset_type: The Format of dataset. Possible values include: "Simple", "Dataflow".
+    :type dataset_type: str or ~azure_machine_learning_workspaces.models.DatasetType
+    :param datastore_id: The asset datastoreId.
+    :type datastore_id: str
+    :param asset_path: DEPRECATED - use
+     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
+    :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
+    :param path: The path of the file/directory.
+    :type path: str
+    :param generated_by: If the name version are system generated (anonymous registration) or user
+     generated. Possible values include: "User", "System".
+    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # readonly fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Dotted keys (e.g. 'properties.path') flatten nested JSON on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'dataset_type': {'key': 'properties.datasetType', 'type': 'str'},
+        'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
+        'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
+        'path': {'key': 'properties.path', 'type': 'str'},
+        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DataVersionResource, self).__init__(**kwargs)
+        # Readonly fields initialized to None; filled in from server responses.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.dataset_type = kwargs.get('dataset_type', None)
+        self.datastore_id = kwargs.get('datastore_id', None)
+        self.asset_path = kwargs.get('asset_path', None)
+        self.path = kwargs.get('path', None)
+        self.generated_by = kwargs.get('generated_by', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class DataVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of DataVersion entities.
+
+    :param value: An array of objects of type DataVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.DataVersionResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DataVersionResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DataVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class DeploymentLogs(msrest.serialization.Model):
+    """DeploymentLogs.
+
+    :param content: The content of the deployment logs.
+    :type content: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'content': {'key': 'content', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DeploymentLogs, self).__init__(**kwargs)
+        self.content = kwargs.get('content', None)
+
+
+class DeploymentLogsRequest(msrest.serialization.Model):
+    """DeploymentLogsRequest.
+
+    :param container_type: The type of container to retrieve logs from. Possible values include:
+     "StorageInitializer", "InferenceServer".
+    :type container_type: str or ~azure_machine_learning_workspaces.models.ContainerType
+    :param tail: The maximum number of lines to tail.
+    :type tail: int
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'container_type': {'key': 'containerType', 'type': 'str'},
+        'tail': {'key': 'tail', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DeploymentLogsRequest, self).__init__(**kwargs)
+        self.container_type = kwargs.get('container_type', None)
+        self.tail = kwargs.get('tail', None)
+
+
+class DistributionConfiguration(msrest.serialization.Model):
+    """DistributionConfiguration.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: Mpi, PyTorch, TensorFlow.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework.Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    """
+
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+    }
+
+    # Discriminator map: routes deserialization to the matching subclass
+    # based on the 'distribution_type' value in the payload.
+    _subtype_map = {
+        'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DistributionConfiguration, self).__init__(**kwargs)
+        # Left unset on the base class; concrete subclasses assign their constant.
+        self.distribution_type = None  # type: Optional[str]
+
+
+class DockerSpecification(msrest.serialization.Model):
+    """Class to represent configuration settings for Docker.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: DockerBuild, DockerImage.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image.Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    """
+
+    _validation = {
+        'docker_specification_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+    }
+
+    # Discriminator map: routes deserialization to the matching subclass
+    # based on the 'docker_specification_type' value in the payload.
+    _subtype_map = {
+        'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DockerSpecification, self).__init__(**kwargs)
+        # Left unset on the base class; concrete subclasses assign their constant.
+        self.docker_specification_type = None  # type: Optional[str]
+        self.platform = kwargs.get('platform', None)
+
+
+class DockerBuild(DockerSpecification):
+    """Class to represent configuration settings for Docker Build.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image.Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    :param dockerfile: Required. Docker command line instructions to assemble an image.
+
+
+    .. raw:: html
+
+       .
+    :type dockerfile: str
+    :param context: Path to a snapshot of the Docker Context. This property is only valid if
+     Dockerfile is specified.
+     The path is relative to the asset path which must contain a single Blob URI value.
+     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path:code:``.
+    :type context: str
+    """
+
+    # 'dockerfile' must be non-empty per the character-class pattern constraint.
+    _validation = {
+        'docker_specification_type': {'required': True},
+        'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+        'dockerfile': {'key': 'dockerfile', 'type': 'str'},
+        'context': {'key': 'context', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DockerBuild, self).__init__(**kwargs)
+        # Fixed discriminator value selecting this DockerSpecification subtype.
+        self.docker_specification_type = 'Build'  # type: str
+        # Required keyword argument: raises KeyError if omitted by the caller.
+        self.dockerfile = kwargs['dockerfile']
+        self.context = kwargs.get('context', None)
+
+
+class DockerImage(DockerSpecification):
+    """Class to represent configuration settings for Docker Image.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image.Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    :param docker_image_uri: Required. Image name of a custom base image.
+
+
+    .. raw:: html
+
+       .
+    :type docker_image_uri: str
+    """
+
+    # 'docker_image_uri' must be non-empty per the character-class pattern constraint.
+    _validation = {
+        'docker_specification_type': {'required': True},
+        'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+        'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DockerImage, self).__init__(**kwargs)
+        # Fixed discriminator value selecting this DockerSpecification subtype.
+        self.docker_specification_type = 'Image'  # type: str
+        # Required keyword argument: raises KeyError if omitted by the caller.
+        self.docker_image_uri = kwargs['docker_image_uri']
+
+
+class DockerImagePlatform(msrest.serialization.Model):
+    """DockerImagePlatform.
+
+    :param operating_system_type: The OS type the Environment. Possible values include: "Linux",
+     "Windows".
+    :type operating_system_type: str or
+     ~azure_machine_learning_workspaces.models.OperatingSystemType
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DockerImagePlatform, self).__init__(**kwargs)
+        self.operating_system_type = kwargs.get('operating_system_type', None)
+
+
+class EncryptionProperty(msrest.serialization.Model):
+    """EncryptionProperty.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+     Possible values include: "Enabled", "Disabled".
+    :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+    :param key_vault_properties: Required. Customer Key vault properties.
+    :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+    """
+
+    _validation = {
+        'status': {'required': True},
+        'key_vault_properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EncryptionProperty, self).__init__(**kwargs)
+        # Both arguments are required: raise KeyError if omitted by the caller.
+        self.status = kwargs['status']
+        self.key_vault_properties = kwargs['key_vault_properties']
+
+
+class EndpointAuthKeys(msrest.serialization.Model):
+    """EndpointAuthKeys.
+
+    :param primary_key: The primary key.
+    :type primary_key: str
+    :param secondary_key: The secondary key.
+    :type secondary_key: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'primary_key': {'key': 'primaryKey', 'type': 'str'},
+        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EndpointAuthKeys, self).__init__(**kwargs)
+        self.primary_key = kwargs.get('primary_key', None)
+        self.secondary_key = kwargs.get('secondary_key', None)
+
+
+class EndpointAuthToken(msrest.serialization.Model):
+    """Service Token.
+
+    :param access_token: Access token.
+    :type access_token: str
+    :param token_type: Access token type.
+    :type token_type: str
+    :param expiry_time_utc: Access token expiry time (UTC).
+    :type expiry_time_utc: long
+    :param refresh_after_time_utc: Refresh access token after time (UTC).
+    :type refresh_after_time_utc: long
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'access_token': {'key': 'accessToken', 'type': 'str'},
+        'token_type': {'key': 'tokenType', 'type': 'str'},
+        'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'},
+        'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EndpointAuthToken, self).__init__(**kwargs)
+        self.access_token = kwargs.get('access_token', None)
+        self.token_type = kwargs.get('token_type', None)
+        self.expiry_time_utc = kwargs.get('expiry_time_utc', None)
+        self.refresh_after_time_utc = kwargs.get('refresh_after_time_utc', None)
+
+
+class EnvironmentContainerResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param tags: A set of tags. Tag dictionary.
+    :type tags: dict[str, str]
+    :param description: The asset description text.
+    :type description: str
+    """
+
+    # readonly fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Dotted keys (e.g. 'properties.tags') flatten nested JSON on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EnvironmentContainerResource, self).__init__(**kwargs)
+        # Readonly fields initialized to None; filled in from server responses.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.properties = kwargs.get('properties', None)
+        self.tags = kwargs.get('tags', None)
+        self.description = kwargs.get('description', None)
+
+
+class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of EnvironmentContainer entities.
+
+    :param value: An array of objects of type EnvironmentContainer.
+    :type value: list[~azure_machine_learning_workspaces.models.EnvironmentContainerResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    # Maps Python attribute names to REST payload keys for (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[EnvironmentContainerResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class ModelEnvironmentDefinition(msrest.serialization.Model):
+    """ModelEnvironmentDefinition.
+
+    :param name: The name of the environment.
+    :type name: str
+    :param version: The environment version.
+    :type version: str
+    :param python: Settings for a Python environment.
+    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+    :param environment_variables: Definition of environment variables to be defined in the
+     environment.
+    :type environment_variables: dict[str, str]
+    :param docker: The definition of a Docker container.
+    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+    :param spark: The configuration for a Spark environment.
+    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+    :param r: Settings for a R environment.
+    :type r: ~azure_machine_learning_workspaces.models.RSection
+    :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+     adding an inferencing stack, do not set this value. Valid values: "latest".
+    :type inferencing_stack_version: str
+    """
+
+    # Maps Python attribute names to REST payload keys and msrest wire types.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'python': {'key': 'python', 'type': 'ModelPythonSection'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+        'r': {'key': 'r', 'type': 'RSection'},
+        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ModelEnvironmentDefinition, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.version = kwargs.get('version', None)
+        self.python = kwargs.get('python', None)
+        self.environment_variables = kwargs.get('environment_variables', None)
+        self.docker = kwargs.get('docker', None)
+        self.spark = kwargs.get('spark', None)
+        self.r = kwargs.get('r', None)
+        self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)
+
+
class EnvironmentImageRequestEnvironment(ModelEnvironmentDefinition):
    """The details of the AZURE ML environment used in an image request.

    Identical in shape to :class:`ModelEnvironmentDefinition`; see the base
    class for field semantics.

    :param name: The name of the environment.
    :type name: str
    :param version: The environment version.
    :type version: str
    :param python: Settings for a Python environment.
    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
    :param environment_variables: Environment variables to be defined in the
     environment.
    :type environment_variables: dict[str, str]
    :param docker: The definition of a Docker container.
    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
    :param spark: The configuration for a Spark environment.
    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
    :param r: Settings for an R environment.
    :type r: ~azure_machine_learning_workspaces.models.RSection
    :param inferencing_stack_version: The inferencing stack version added to
     the image; leave unset to avoid adding one. Valid values: "latest".
    :type inferencing_stack_version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'python': {'key': 'python', 'type': 'ModelPythonSection'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
        'r': {'key': 'r', 'type': 'RSection'},
        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # All attribute handling is inherited from ModelEnvironmentDefinition.
        super(EnvironmentImageRequestEnvironment, self).__init__(**kwargs)
+
+
class EnvironmentReference(msrest.serialization.Model):
    """A name/version pair identifying an environment.

    :param name: Name of the environment.
    :type name: str
    :param version: Version of the environment.
    :type version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentReference, self).__init__(**kwargs)
        # Both fields are optional; absent keys default to None.
        self.name = kwargs.get('name')
        self.version = kwargs.get('version')
+
+
class EnvironmentImageRequestEnvironmentReference(EnvironmentReference):
    """The unique identifying details of the AZURE ML environment in an image
    request.

    :param name: Name of the environment.
    :type name: str
    :param version: Version of the environment.
    :type version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # Behavior is fully inherited from EnvironmentReference.
        super(EnvironmentImageRequestEnvironmentReference, self).__init__(**kwargs)
+
+
class ModelEnvironmentDefinitionResponse(msrest.serialization.Model):
    """Response form of a model environment definition; mirrors
    ModelEnvironmentDefinition but uses the response variants of the Docker
    and R sections.

    :param name: The name of the environment.
    :type name: str
    :param version: The environment version.
    :type version: str
    :param python: Settings for a Python environment.
    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
    :param environment_variables: Environment variables to be defined in the
     environment.
    :type environment_variables: dict[str, str]
    :param docker: The definition of a Docker container.
    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
    :param spark: The configuration for a Spark environment.
    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
    :param r: Settings for an R environment.
    :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
    :param inferencing_stack_version: The inferencing stack version added to
     the image; leave unset to avoid adding one. Valid values: "latest".
    :type inferencing_stack_version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'python': {'key': 'python', 'type': 'ModelPythonSection'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
        'r': {'key': 'r', 'type': 'RSectionResponse'},
        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ModelEnvironmentDefinitionResponse, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for attr in ('name', 'version', 'python', 'environment_variables',
                     'docker', 'spark', 'r', 'inferencing_stack_version'):
            setattr(self, attr, kwargs.get(attr))
+
+
class EnvironmentImageResponseEnvironment(ModelEnvironmentDefinitionResponse):
    """The details of the AZURE ML environment as returned in an image
    response.

    Identical in shape to :class:`ModelEnvironmentDefinitionResponse`; see the
    base class for field semantics.

    :param name: The name of the environment.
    :type name: str
    :param version: The environment version.
    :type version: str
    :param python: Settings for a Python environment.
    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
    :param environment_variables: Environment variables to be defined in the
     environment.
    :type environment_variables: dict[str, str]
    :param docker: The definition of a Docker container.
    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
    :param spark: The configuration for a Spark environment.
    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
    :param r: Settings for an R environment.
    :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
    :param inferencing_stack_version: The inferencing stack version added to
     the image; leave unset to avoid adding one. Valid values: "latest".
    :type inferencing_stack_version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'python': {'key': 'python', 'type': 'ModelPythonSection'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
        'r': {'key': 'r', 'type': 'RSectionResponse'},
        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # Behavior is fully inherited from ModelEnvironmentDefinitionResponse.
        super(EnvironmentImageResponseEnvironment, self).__init__(**kwargs)
+
+
class EnvironmentImageResponseEnvironmentReference(EnvironmentReference):
    """The unique identifying details of the AZURE ML environment in an image
    response.

    :param name: Name of the environment.
    :type name: str
    :param version: Version of the environment.
    :type version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # Behavior is fully inherited from EnvironmentReference.
        super(EnvironmentImageResponseEnvironmentReference, self).__init__(**kwargs)
+
+
class EnvironmentSpecificationVersionResource(msrest.serialization.Model):
    """Azure Resource Manager resource envelope for an
    EnvironmentSpecificationVersion.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :ivar environment_specification_type: Whether the environment
     specification is user managed or curated by the Azure ML service.
     Possible values include: "Curated", "UserCreated".
    :vartype environment_specification_type: str or
     ~azure_machine_learning_workspaces.models.EnvironmentSpecificationType
    :param docker: Class to represent configuration settings for Docker.
    :type docker: ~azure_machine_learning_workspaces.models.DockerSpecification
    :param conda_file: Standard configuration file used by conda that lets you
     install any kind of package, including Python, R, and C/C++ packages.
    :type conda_file: str
    :param inference_container_properties: Defines configuration specific to
     inference.
    :type inference_container_properties:
     ~azure_machine_learning_workspaces.models.InferenceContainerProperties
    :param generated_by: Whether the name/version are system generated
     (anonymous registration) or user generated. Possible values include:
     "User", "System".
    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed,
     and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'environment_specification_type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'environment_specification_type': {'key': 'properties.environmentSpecificationType', 'type': 'str'},
        'docker': {'key': 'properties.docker', 'type': 'DockerSpecification'},
        'conda_file': {'key': 'properties.condaFile', 'type': 'str'},
        'inference_container_properties': {'key': 'properties.inferenceContainerProperties', 'type': 'InferenceContainerProperties'},
        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'tags': {'key': 'properties.tags', 'type': '{str}'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersionResource, self).__init__(**kwargs)
        # Read-only fields are always server-populated, never taken from kwargs.
        for readonly in ('id', 'name', 'type', 'system_data',
                         'environment_specification_type'):
            setattr(self, readonly, None)
        # Writable fields default to None when not supplied.
        for attr in ('docker', 'conda_file', 'inference_container_properties',
                     'generated_by', 'description', 'tags', 'properties'):
            setattr(self, attr, kwargs.get(attr))
+
+
class EnvironmentSpecificationVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of EnvironmentSpecificationVersion entities.

    :param value: An array of objects of type EnvironmentSpecificationVersion.
    :type value:
     list[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource]
    :param next_link: Link to the next page of results, if any.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[EnvironmentSpecificationVersionResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersionResourceArmPaginatedResult, self).__init__(**kwargs)
        # Both fields are optional; absent keys default to None.
        self.value = kwargs.get('value')
        self.next_link = kwargs.get('next_link')
+
+
class ErrorDetail(msrest.serialization.Model):
    """Error detail information.

    All required parameters must be populated in order to send to Azure.

    :param code: Required. Error code.
    :type code: str
    :param message: Required. Error message.
    :type message: str
    """

    _validation = {
        'code': {'required': True},
        'message': {'required': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ErrorDetail, self).__init__(**kwargs)
        # Required fields: a missing key raises KeyError, matching the
        # "must be populated" contract.
        self.code = kwargs['code']
        self.message = kwargs['message']
+
+
class EstimatedVmPrice(msrest.serialization.Model):
    """The estimated price info for using a VM of a particular OS type, tier,
    etc.

    All required parameters must be populated in order to send to Azure.

    :param retail_price: Required. The price charged for using the VM.
    :type retail_price: float
    :param os_type: Required. Operating system type used by the VM. Possible
     values include: "Linux", "Windows".
    :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
    :param vm_tier: Required. The type of the VM. Possible values include:
     "Standard", "LowPriority", "Spot".
    :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
    """

    _validation = {
        'retail_price': {'required': True},
        'os_type': {'required': True},
        'vm_tier': {'required': True},
    }

    _attribute_map = {
        'retail_price': {'key': 'retailPrice', 'type': 'float'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'vm_tier': {'key': 'vmTier', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EstimatedVmPrice, self).__init__(**kwargs)
        # All three fields are required; missing keys raise KeyError.
        self.retail_price = kwargs['retail_price']
        self.os_type = kwargs['os_type']
        self.vm_tier = kwargs['vm_tier']
+
+
class EstimatedVmPrices(msrest.serialization.Model):
    """The estimated price info for using a VM.

    All required parameters must be populated in order to send to Azure.

    :param billing_currency: Required. Three lettered code specifying the
     currency of the VM price. Example: USD. Possible values include: "USD".
    :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
    :param unit_of_measure: Required. The unit of time measurement for the
     specified VM price. Example: OneHour. Possible values include: "OneHour".
    :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
    :param values: Required. The list of estimated prices for using a VM of a
     particular OS type, tier, etc.
    :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
    """

    _validation = {
        'billing_currency': {'required': True},
        'unit_of_measure': {'required': True},
        'values': {'required': True},
    }

    _attribute_map = {
        'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
        'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
        'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
    }

    def __init__(self, **kwargs):
        super(EstimatedVmPrices, self).__init__(**kwargs)
        # All three fields are required; missing keys raise KeyError.
        self.billing_currency = kwargs['billing_currency']
        self.unit_of_measure = kwargs['unit_of_measure']
        self.values = kwargs['values']
+
+
class EvaluationConfiguration(msrest.serialization.Model):
    """Configuration of the metric used to evaluate hyperparameter-tuning
    runs.

    All required parameters must be populated in order to send to Azure.

    :param primary_metric_name: Required. Name of the primary metric.
    :type primary_metric_name: str
    :param primary_metric_goal: Required. Defines supported metric goals for
     hyperparameter tuning. Possible values include: "Minimize", "Maximize".
    :type primary_metric_goal: str or ~azure_machine_learning_workspaces.models.PrimaryMetricGoal
    """

    _validation = {
        'primary_metric_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'primary_metric_goal': {'required': True},
    }

    _attribute_map = {
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EvaluationConfiguration, self).__init__(**kwargs)
        # Both fields are required; missing keys raise KeyError.
        self.primary_metric_name = kwargs['primary_metric_name']
        self.primary_metric_goal = kwargs['primary_metric_goal']
+
+
class ExperimentLimits(msrest.serialization.Model):
    """Limit settings on an AutoML experiment.

    :param max_trials: Number of iterations.
    :type max_trials: int
    :param experiment_timeout_in_minutes: Experiment timeout.
    :type experiment_timeout_in_minutes: int
    :param max_concurrent_trials: Maximum concurrent iterations.
    :type max_concurrent_trials: int
    :param max_cores_per_trial: Max cores per iteration.
    :type max_cores_per_trial: int
    """

    _attribute_map = {
        'max_trials': {'key': 'maxTrials', 'type': 'int'},
        'experiment_timeout_in_minutes': {'key': 'experimentTimeoutInMinutes', 'type': 'int'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
        'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(ExperimentLimits, self).__init__(**kwargs)
        # Every limit is optional; missing keys default to None (no limit).
        for attr in ('max_trials', 'experiment_timeout_in_minutes',
                     'max_concurrent_trials', 'max_cores_per_trial'):
            setattr(self, attr, kwargs.get(attr))
+
+
class FeaturizationSettings(msrest.serialization.Model):
    """Featurization configuration.

    :param featurization_config: Featurization config json string.
    :type featurization_config: str
    :param enable_dnn_featurization: Enable DNN featurization.
    :type enable_dnn_featurization: bool
    """

    _attribute_map = {
        'featurization_config': {'key': 'featurizationConfig', 'type': 'str'},
        'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(FeaturizationSettings, self).__init__(**kwargs)
        # Both fields are optional; absent keys default to None.
        self.featurization_config = kwargs.get('featurization_config')
        self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization')
+
+
class ForecastingSettings(msrest.serialization.Model):
    """Forecasting-specific parameters.

    :param forecasting_country_or_region: Country or region for holidays for
     forecasting tasks. These should be ISO 3166 two-letter country/region
     codes, for example 'US' or 'GB'.
    :type forecasting_country_or_region: str
    :param time_column_name: Time column name.
    :type time_column_name: str
    :param target_lags: Target lags.
    :type target_lags: list[int]
    :param target_rolling_window_size: Forecasting window size.
    :type target_rolling_window_size: int
    :param forecast_horizon: Forecasting horizon.
    :type forecast_horizon: int
    :param time_series_id_column_names: Time series column names.
    :type time_series_id_column_names: list[str]
    :param enable_dnn_training: Enable recommendation of DNN models.
    :type enable_dnn_training: bool
    """

    _attribute_map = {
        'forecasting_country_or_region': {'key': 'forecastingCountryOrRegion', 'type': 'str'},
        'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
        'target_lags': {'key': 'targetLags', 'type': '[int]'},
        'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'int'},
        'forecast_horizon': {'key': 'forecastHorizon', 'type': 'int'},
        'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
        'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ForecastingSettings, self).__init__(**kwargs)
        # Every setting is optional; missing keys default to None.
        for attr in ('forecasting_country_or_region', 'time_column_name',
                     'target_lags', 'target_rolling_window_size',
                     'forecast_horizon', 'time_series_id_column_names',
                     'enable_dnn_training'):
            setattr(self, attr, kwargs.get(attr))
+
+
class GeneralSettings(msrest.serialization.Model):
    """General settings used to submit an AutoML job.

    :param primary_metric: Primary optimization metric. Possible values
     include: "AUC_weighted", "Accuracy", "Norm_macro_recall",
     "Average_precision_score_weighted", "Precision_score_weighted",
     "Spearman_correlation", "Normalized_root_mean_squared_error", "R2_score",
     "Normalized_mean_absolute_error", "Normalized_root_mean_squared_log_error".
    :type primary_metric: str or ~azure_machine_learning_workspaces.models.OptimizationMetric
    :param enable_model_explainability: Flag to turn on explainability on best
     model.
    :type enable_model_explainability: bool
    :param task_type: Type of AutoML experiment [Classification, Regression,
     Forecasting]. Possible values include: "Classification", "Regression",
     "Forecasting".
    :type task_type: str or ~azure_machine_learning_workspaces.models.TaskType
    """

    _attribute_map = {
        'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
        'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
        'task_type': {'key': 'taskType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GeneralSettings, self).__init__(**kwargs)
        # All settings are optional; absent keys default to None.
        self.primary_metric = kwargs.get('primary_metric')
        self.enable_model_explainability = kwargs.get('enable_model_explainability')
        self.task_type = kwargs.get('task_type')
+
+
class GlusterFsSection(msrest.serialization.Model):
    """Connection details for a GlusterFS volume.

    All required parameters must be populated in order to send to Azure.

    :param server_address: Required. GlusterFS server address (can be the IP
     address or server name).
    :type server_address: str
    :param volume_name: Required. GlusterFS volume name.
    :type volume_name: str
    """

    _validation = {
        'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'server_address': {'key': 'serverAddress', 'type': 'str'},
        'volume_name': {'key': 'volumeName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GlusterFsSection, self).__init__(**kwargs)
        # Both fields are required; missing keys raise KeyError.
        self.server_address = kwargs['server_address']
        self.volume_name = kwargs['volume_name']
+
+
class HdInsight(Compute):
    """An HDInsight compute target.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by
     server. Possible values include: "AKS", "AmlCompute", "ComputeInstance",
     "DataFactory", "VirtualMachine", "HDInsight", "Databricks",
     "DataLakeAnalytics".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values
     are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible
     values include: "Unknown", "Updating", "Creating", "Deleting",
     "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned
     by user and brought from outside if true, or machine learning service
     provisioned it if false.
    :vartype is_attached_compute: bool
    :param properties: HDInsight-specific properties.
    :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
    }

    def __init__(self, **kwargs):
        super(HdInsight, self).__init__(**kwargs)
        # Polymorphic discriminator: always HDInsight for this subclass.
        self.compute_type = 'HDInsight'  # type: str
        self.properties = kwargs.get('properties')
+
+
class HdInsightProperties(msrest.serialization.Model):
    """Connection properties for an HDInsight cluster.

    :param ssh_port: Port open for ssh connections on the master node of the
     cluster.
    :type ssh_port: int
    :param address: Public IP address of the master node of the cluster.
    :type address: str
    :param administrator_account: Admin credentials for master node of the
     cluster.
    :type administrator_account:
     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
    """

    _attribute_map = {
        'ssh_port': {'key': 'sshPort', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
    }

    def __init__(self, **kwargs):
        super(HdInsightProperties, self).__init__(**kwargs)
        # All fields are optional; absent keys default to None.
        self.ssh_port = kwargs.get('ssh_port')
        self.address = kwargs.get('address')
        self.administrator_account = kwargs.get('administrator_account')
+
+
class IdAssetReference(AssetReferenceBase):
    """Asset reference addressed by asset id.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.
     Constant filled by server. Possible values include: "Id", "DataPath",
     "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param asset_id: Required. The id of the referenced asset.
    :type asset_id: str
    """

    _validation = {
        'reference_type': {'required': True},
        'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdAssetReference, self).__init__(**kwargs)
        # Polymorphic discriminator: always 'Id' for this subclass.
        self.reference_type = 'Id'  # type: str
        self.asset_id = kwargs['asset_id']
+
+
class Identity(msrest.serialization.Model):
    """Identity for the resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar principal_id: The principal ID of resource identity.
    :vartype principal_id: str
    :ivar tenant_id: The tenant ID of resource.
    :vartype tenant_id: str
    :param type: The identity type. Possible values include: "SystemAssigned",
     "SystemAssigned,UserAssigned", "UserAssigned", "None".
    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
    :param user_assigned_identities: The user assigned identities associated
     with the resource.
    :type user_assigned_identities: dict[str,
     ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
    """

    _validation = {
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
    }

    def __init__(self, **kwargs):
        super(Identity, self).__init__(**kwargs)
        # Read-only, server-populated fields.
        self.principal_id = None
        self.tenant_id = None
        # Writable fields default to None when not supplied.
        self.type = kwargs.get('type')
        self.user_assigned_identities = kwargs.get('user_assigned_identities')
+
+
class ImageAsset(msrest.serialization.Model):
    """An image asset.

    :param id: The asset id.
    :type id: str
    :param mime_type: The mime type.
    :type mime_type: str
    :param url: The URL of the asset.
    :type url: str
    :param unpack: Whether the asset is unpacked.
    :type unpack: bool
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'mime_type': {'key': 'mimeType', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'unpack': {'key': 'unpack', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ImageAsset, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for attr in ('id', 'mime_type', 'url', 'unpack'):
            setattr(self, attr, kwargs.get(attr))
+
+
class InferenceContainerProperties(msrest.serialization.Model):
    """Routes exposed by an inference server container.

    :param liveness_route: The route to check the liveness of the inference
     server container.
    :type liveness_route: ~azure_machine_learning_workspaces.models.Route
    :param readiness_route: The route to check the readiness of the inference
     server container.
    :type readiness_route: ~azure_machine_learning_workspaces.models.Route
    :param scoring_route: The port to send the scoring requests to, within the
     inference server container.
    :type scoring_route: ~azure_machine_learning_workspaces.models.Route
    """

    _attribute_map = {
        'liveness_route': {'key': 'livenessRoute', 'type': 'Route'},
        'readiness_route': {'key': 'readinessRoute', 'type': 'Route'},
        'scoring_route': {'key': 'scoringRoute', 'type': 'Route'},
    }

    def __init__(self, **kwargs):
        super(InferenceContainerProperties, self).__init__(**kwargs)
        # All routes are optional; absent keys default to None.
        self.liveness_route = kwargs.get('liveness_route')
        self.readiness_route = kwargs.get('readiness_route')
        self.scoring_route = kwargs.get('scoring_route')
+
+
class InputData(msrest.serialization.Model):
    """A data binding for a job input.

    :param dataset_id: Dataset registration id.
    :type dataset_id: str
    :param mode: Mode type, can be set for DatasetId. Possible values include:
     "Mount", "Download", "Upload".
    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
    :param value: Literal value of a data binding. Example "42".
    :type value: str
    """

    _attribute_map = {
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(InputData, self).__init__(**kwargs)
        # All fields are optional; absent keys default to None.
        self.dataset_id = kwargs.get('dataset_id')
        self.mode = kwargs.get('mode')
        self.value = kwargs.get('value')
+
+
+class JobBaseInteractionEndpoints(msrest.serialization.Model):
+    """Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+For local jobs, a job endpoint will have a value of FileStreamObject.
+
+    :param tracking:
+    :type tracking: str
+    :param studio:
+    :type studio: str
+    :param grafana:
+    :type grafana: str
+    :param tensorboard:
+    :type tensorboard: str
+    :param local:
+    :type local: str
+    """
+
+    # Note: wire keys are PascalCase here, unlike most models in this file.
+    _attribute_map = {
+        'tracking': {'key': 'Tracking', 'type': 'str'},
+        'studio': {'key': 'Studio', 'type': 'str'},
+        'grafana': {'key': 'Grafana', 'type': 'str'},
+        'tensorboard': {'key': 'Tensorboard', 'type': 'str'},
+        'local': {'key': 'Local', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobBaseInteractionEndpoints, self).__init__(**kwargs)
+        self.tracking = kwargs.get('tracking', None)
+        self.studio = kwargs.get('studio', None)
+        self.grafana = kwargs.get('grafana', None)
+        self.tensorboard = kwargs.get('tensorboard', None)
+        self.local = kwargs.get('local', None)
+
+
+class JobBaseResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :param properties: Required. Job base definition.
+    :type properties: ~azure_machine_learning_workspaces.models.JobBase
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # Constraints enforced by msrest: readonly fields are server-populated.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'JobBase'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobBaseResource, self).__init__(**kwargs)
+        # Read-only attributes start as None and are filled in by the server.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.properties = kwargs['properties']  # required: raises KeyError if omitted
+        self.system_data = None
+
+
+class JobBaseResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of JobBase entities.
+
+    :param value: An array of objects of type JobBase.
+    :type value: list[~azure_machine_learning_workspaces.models.JobBaseResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[JobBaseResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class JobOutput(msrest.serialization.Model):
+    """JobOutput.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar datastore_id: ARM ID of the datastore where the job logs and artifacts are stored, or
+     null for the default container ("azureml") in the workspace's storage account.
+    :vartype datastore_id: str
+    :ivar path: Path within the datastore to the job logs and artifacts.
+    :vartype path: str
+    """
+
+    # Both fields are server-populated and ignored on requests.
+    _validation = {
+        'datastore_id': {'readonly': True},
+        'path': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobOutput, self).__init__(**kwargs)
+        self.datastore_id = None
+        self.path = None
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+    """KeyVaultProperties.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+     encryption key is present.
+    :type key_vault_arm_id: str
+    :param key_identifier: Required. Key vault uri to access the encryption key.
+    :type key_identifier: str
+    :param identity_client_id: For future use - The client id of the identity which will be used to
+     access key vault.
+    :type identity_client_id: str
+    """
+
+    # Both key references are mandatory for customer-managed-key encryption.
+    _validation = {
+        'key_vault_arm_id': {'required': True},
+        'key_identifier': {'required': True},
+    }
+
+    _attribute_map = {
+        'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+        'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+        'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(KeyVaultProperties, self).__init__(**kwargs)
+        self.key_vault_arm_id = kwargs['key_vault_arm_id']  # required: KeyError if omitted
+        self.key_identifier = kwargs['key_identifier']  # required: KeyError if omitted
+        self.identity_client_id = kwargs.get('identity_client_id', None)
+
+
+class LabelCategory(msrest.serialization.Model):
+    """Label category definition.
+
+    :param display_name: Display name of the label category.
+    :type display_name: str
+    :param allow_multi_select: Indicates whether it is allowed to select multiple classes in this
+     category.
+    :type allow_multi_select: bool
+    :param classes: Dictionary of label classes in this category.
+    :type classes: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+    """
+
+    # '{LabelClass}' means a dict of str -> LabelClass in msrest type syntax.
+    _attribute_map = {
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'},
+        'classes': {'key': 'classes', 'type': '{LabelClass}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelCategory, self).__init__(**kwargs)
+        self.display_name = kwargs.get('display_name', None)
+        self.allow_multi_select = kwargs.get('allow_multi_select', None)
+        self.classes = kwargs.get('classes', None)
+
+
+class LabelClass(msrest.serialization.Model):
+    """Label class definition.
+
+    :param display_name: Display name of the label class.
+    :type display_name: str
+    :param subclasses: Dictionary of subclasses of the label class.
+    :type subclasses: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+    """
+
+    # Recursive structure: subclasses map to further LabelClass instances.
+    _attribute_map = {
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelClass, self).__init__(**kwargs)
+        self.display_name = kwargs.get('display_name', None)
+        self.subclasses = kwargs.get('subclasses', None)
+
+
+class LabelingDatasetConfiguration(msrest.serialization.Model):
+    """Labeling dataset configuration definition.
+
+    :param asset_name: Name of the data asset to perform labeling.
+    :type asset_name: str
+    :param incremental_dataset_refresh_enabled: Indicates whether to enable incremental dataset
+     refresh.
+    :type incremental_dataset_refresh_enabled: bool
+    :param dataset_version: AML dataset version.
+    :type dataset_version: str
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'asset_name': {'key': 'assetName', 'type': 'str'},
+        'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'},
+        'dataset_version': {'key': 'datasetVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingDatasetConfiguration, self).__init__(**kwargs)
+        self.asset_name = kwargs.get('asset_name', None)
+        self.incremental_dataset_refresh_enabled = kwargs.get('incremental_dataset_refresh_enabled', None)
+        self.dataset_version = kwargs.get('dataset_version', None)
+
+
+class LabelingJob(JobBase):
+    """Labeling job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param job_type: Required. Specifies the type of job.Constant filled by server.  Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state:  Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param label_categories: Label categories of the job.
+    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
+    :param job_instructions: Labeling instructions of the job.
+    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
+    :param dataset_configuration: Configuration of dataset used in the job.
+    :type dataset_configuration:
+     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
+    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
+    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
+    :param labeling_job_media_properties: Properties of a labeling job.
+    :type labeling_job_media_properties:
+     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
+    :ivar project_id: Internal id of the job(Previously called project).
+    :vartype project_id: str
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :ivar progress_metrics: Progress metrics of the job.
+    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
+    :ivar status_messages: Status messages of the job.
+    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
+    """
+
+    # Constraints enforced by msrest: readonly fields are server-populated.
+    _validation = {
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'project_id': {'readonly': True},
+        'status': {'readonly': True},
+        'progress_metrics': {'readonly': True},
+        'status_messages': {'readonly': True},
+    }
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'},
+        'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'},
+        'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
+        'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
+        'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
+        'project_id': {'key': 'projectId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'},
+        'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # description/tags/properties are handled by the JobBase base class.
+        super(LabelingJob, self).__init__(**kwargs)
+        # Polymorphic discriminator value for this JobBase subtype.
+        self.job_type = 'Labeling'  # type: str
+        self.label_categories = kwargs.get('label_categories', None)
+        self.job_instructions = kwargs.get('job_instructions', None)
+        self.dataset_configuration = kwargs.get('dataset_configuration', None)
+        self.ml_assist_configuration = kwargs.get('ml_assist_configuration', None)
+        self.labeling_job_media_properties = kwargs.get('labeling_job_media_properties', None)
+        # Read-only attributes start as None and are filled in by the server.
+        self.project_id = None
+        self.status = None
+        self.progress_metrics = None
+        self.status_messages = None
+
+
+class LabelingJobMediaProperties(msrest.serialization.Model):
+    """Properties of a labeling job.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+    }
+
+    # Polymorphic dispatch: the 'media_type' discriminator selects the concrete subclass.
+    _subtype_map = {
+        'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobMediaProperties, self).__init__(**kwargs)
+        # Base class leaves the discriminator unset; subclasses assign it.
+        self.media_type = None  # type: Optional[str]
+
+
+class LabelingJobImageProperties(LabelingJobMediaProperties):
+    """Properties of a labeling job for image data.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    :param annotation_type: Annotation type of image labeling job. Possible values include:
+     "Classification", "BoundingBox", "InstanceSegmentation".
+    :type annotation_type: str or ~azure_machine_learning_workspaces.models.ImageAnnotationType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+        'annotation_type': {'key': 'annotationType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobImageProperties, self).__init__(**kwargs)
+        # Fixed discriminator value for this media-type subclass.
+        self.media_type = 'Image'  # type: str
+        self.annotation_type = kwargs.get('annotation_type', None)
+
+
+class LabelingJobInstructions(msrest.serialization.Model):
+    """Instructions for labeling job.
+
+    :param uri: The link to a page with detailed labeling instructions for labelers.
+    :type uri: str
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'uri': {'key': 'uri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobInstructions, self).__init__(**kwargs)
+        self.uri = kwargs.get('uri', None)
+
+
+class LabelingJobResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param job_type: Required. Specifies the type of job.Constant filled by server.  Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state:  Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param label_categories: Label categories of the job.
+    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
+    :param job_instructions: Labeling instructions of the job.
+    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
+    :param dataset_configuration: Configuration of dataset used in the job.
+    :type dataset_configuration:
+     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
+    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
+    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
+    :param labeling_job_media_properties: Properties of a labeling job.
+    :type labeling_job_media_properties:
+     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
+    :ivar project_id: Internal id of the job(Previously called project).
+    :vartype project_id: str
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :ivar progress_metrics: Progress metrics of the job.
+    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
+    :ivar status_messages: Status messages of the job.
+    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
+    """
+
+    # Constraints enforced by msrest: readonly fields are server-populated.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'project_id': {'readonly': True},
+        'status': {'readonly': True},
+        'progress_metrics': {'readonly': True},
+        'status_messages': {'readonly': True},
+    }
+
+    # Keys with a 'properties.' prefix are flattened from the nested ARM
+    # 'properties' object into top-level attributes of this model.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'job_type': {'key': 'properties.jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'properties.interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'label_categories': {'key': 'properties.labelCategories', 'type': '{LabelCategory}'},
+        'job_instructions': {'key': 'properties.jobInstructions', 'type': 'LabelingJobInstructions'},
+        'dataset_configuration': {'key': 'properties.datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
+        'ml_assist_configuration': {'key': 'properties.mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
+        'labeling_job_media_properties': {'key': 'properties.labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
+        'project_id': {'key': 'properties.projectId', 'type': 'str'},
+        'status': {'key': 'properties.status', 'type': 'str'},
+        'progress_metrics': {'key': 'properties.progressMetrics', 'type': 'ProgressMetrics'},
+        'status_messages': {'key': 'properties.statusMessages', 'type': '[StatusMessage]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobResource, self).__init__(**kwargs)
+        # Read-only ARM envelope attributes; populated by the server.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.job_type = None  # type: Optional[str]
+        self.provisioning_state = None
+        self.interaction_endpoints = None
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.properties = kwargs.get('properties', None)
+        self.label_categories = kwargs.get('label_categories', None)
+        self.job_instructions = kwargs.get('job_instructions', None)
+        self.dataset_configuration = kwargs.get('dataset_configuration', None)
+        self.ml_assist_configuration = kwargs.get('ml_assist_configuration', None)
+        self.labeling_job_media_properties = kwargs.get('labeling_job_media_properties', None)
+        # Read-only job state attributes; populated by the server.
+        self.project_id = None
+        self.status = None
+        self.progress_metrics = None
+        self.status_messages = None
+
+
+class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of LabelingJob entities.
+
+    :param value: An array of objects of type LabelingJob.
+    :type value: list[~azure_machine_learning_workspaces.models.LabelingJobResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[LabelingJobResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class LabelingJobTextProperties(LabelingJobMediaProperties):
+    """Properties of a labeling job for text data.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    :param annotation_type: Annotation type of text labeling job. Possible values include:
+     "Classification".
+    :type annotation_type: str or ~azure_machine_learning_workspaces.models.TextAnnotationType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+        'annotation_type': {'key': 'annotationType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobTextProperties, self).__init__(**kwargs)
+        # Fixed discriminator value for this media-type subclass.
+        self.media_type = 'Text'  # type: str
+        self.annotation_type = kwargs.get('annotation_type', None)
+
+
+class LinkedInfo(msrest.serialization.Model):
+    """LinkedInfo.
+
+    :param linked_id: Linked service ID.
+    :type linked_id: str
+    :param linked_resource_name: Linked service resource name.
+    :type linked_resource_name: str
+    :param origin: Type of the linked service. Possible values include: "Synapse".
+    :type origin: str or ~azure_machine_learning_workspaces.models.OriginType
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'linked_id': {'key': 'linkedId', 'type': 'str'},
+        'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedInfo, self).__init__(**kwargs)
+        self.linked_id = kwargs.get('linked_id', None)
+        self.linked_resource_name = kwargs.get('linked_resource_name', None)
+        self.origin = kwargs.get('origin', None)
+
+
+class LinkedServiceList(msrest.serialization.Model):
+    """List response of linked service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: Array of linked service.
+    :vartype value: list[~azure_machine_learning_workspaces.models.LinkedServiceResponse]
+    """
+
+    # The list itself is server-populated and ignored on requests.
+    _validation = {
+        'value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[LinkedServiceResponse]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceList, self).__init__(**kwargs)
+        self.value = None
+
+
+class LinkedServiceProps(msrest.serialization.Model):
+    """LinkedService specific properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param linked_service_resource_id: Required. ResourceId of the link target of the linked
+     service.
+    :type linked_service_resource_id: str
+    :ivar link_type: Type of the link target. Default value: "Synapse".
+    :vartype link_type: str
+    :param created_time: The creation time of the linked service.
+    :type created_time: ~datetime.datetime
+    :param modified_time: The last modified time of the linked service.
+    :type modified_time: ~datetime.datetime
+    """
+
+    # 'constant' marks link_type as a fixed class-level value, not user-settable.
+    _validation = {
+        'linked_service_resource_id': {'required': True},
+        'link_type': {'constant': True},
+    }
+
+    _attribute_map = {
+        'linked_service_resource_id': {'key': 'linkedServiceResourceId', 'type': 'str'},
+        'link_type': {'key': 'linkType', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+    }
+
+    # Class-level constant serialized for every instance; see _validation above.
+    link_type = "Synapse"
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceProps, self).__init__(**kwargs)
+        self.linked_service_resource_id = kwargs['linked_service_resource_id']  # required: KeyError if omitted
+        self.created_time = kwargs.get('created_time', None)
+        self.modified_time = kwargs.get('modified_time', None)
+
+
+class LinkedServiceRequest(msrest.serialization.Model):
+    """object used for creating linked service.
+
+    :param name: Friendly name of the linked service.
+    :type name: str
+    :param location: location of the linked service.
+    :type location: str
+    :param identity: Identity for the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param properties: LinkedService specific properties.
+    :type properties: ~azure_machine_learning_workspaces.models.LinkedServiceProps
+    """
+
+    # Serialization map: attribute name -> REST wire key and msrest type.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'properties': {'key': 'properties', 'type': 'LinkedServiceProps'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceRequest, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.location = kwargs.get('location', None)
+        self.identity = kwargs.get('identity', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class LinkedServiceResponse(msrest.serialization.Model):
+    """Linked service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: ResourceId of the link of the linked service.
+    :vartype id: str
+    :ivar name: Friendly name of the linked service.
+    :vartype name: str
+    :ivar type: Resource type of linked service.
+    :vartype type: str
+    :param location: location of the linked service.
+    :type location: str
+    :param identity: Identity for the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param properties: LinkedService specific properties.
+    :type properties: ~azure_machine_learning_workspaces.models.LinkedServiceProps
+    """
+
+    # ARM envelope identifiers are server-populated and ignored on requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'properties': {'key': 'properties', 'type': 'LinkedServiceProps'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceResponse, self).__init__(**kwargs)
+        # Read-only attributes start as None and are filled in by the server.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.location = kwargs.get('location', None)
+        self.identity = kwargs.get('identity', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+    """The List Aml user feature operation response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of AML user facing features.
+    :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+    :ivar next_link: The URI to fetch the next page of AML user features information. Call
+     ListNext() with this to fetch the next page of AML user features information.
+    :vartype next_link: str
+    """
+
+    # Entire response is server-populated; nothing is sent on requests.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+    """ListNotebookKeysResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar primary_access_key:
+    :vartype primary_access_key: str
+    :ivar secondary_access_key:
+    :vartype secondary_access_key: str
+    """
+
+    # Both access keys are returned by the service and are never sent.
+    _validation = {
+        'primary_access_key': {'readonly': True},
+        'secondary_access_key': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+        'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListNotebookKeysResult, self).__init__(**kwargs)
+        self.primary_access_key = None
+        self.secondary_access_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+    """The List Usages operation response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of AML resource usages.
+    :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+    :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+     ListNext() with this to fetch the next page of AML resource usage information.
+    :vartype next_link: str
+    """
+
+    # Entire response is server-populated; nothing is sent on requests.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Usage]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListUsagesResult, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+    """ListWorkspaceKeysResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar user_storage_key:
+    :vartype user_storage_key: str
+    :ivar user_storage_resource_id:
+    :vartype user_storage_resource_id: str
+    :ivar app_insights_instrumentation_key:
+    :vartype app_insights_instrumentation_key: str
+    :ivar container_registry_credentials:
+    :vartype container_registry_credentials:
+     ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+    """
+
+    # All credentials are returned by the service and are never sent.
+    _validation = {
+        'user_storage_key': {'readonly': True},
+        'user_storage_resource_id': {'readonly': True},
+        'app_insights_instrumentation_key': {'readonly': True},
+        'container_registry_credentials': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+        'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+        'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+        'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+        self.user_storage_key = None
+        self.user_storage_resource_id = None
+        self.app_insights_instrumentation_key = None
+        self.container_registry_credentials = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+ """Wrapper for error response to follow ARM guidelines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MachineLearningServiceError, self).__init__(**kwargs)
+ self.error = None
+
+
+class ManagedComputeConfiguration(ComputeConfiguration):
+ """ManagedComputeConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+ "AKS", "AzureMLCompute".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedComputeConfiguration, self).__init__(**kwargs)
+ self.compute_type = 'Managed' # type: str
+
+
+class ManagedDeploymentConfiguration(DeploymentConfigurationBase):
+    """ManagedDeploymentConfiguration.
+
+    Deployment configuration for the 'Managed' compute type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param app_insights_enabled:
+    :type app_insights_enabled: bool
+    :param max_concurrent_requests_per_instance:
+    :type max_concurrent_requests_per_instance: int
+    :param max_queue_wait_ms:
+    :type max_queue_wait_ms: int
+    :param scoring_timeout_ms:
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param instance_type:
+    :type instance_type: str
+    :param os_type: Possible values include: "Linux", "Windows".
+    :type os_type: str or ~azure_machine_learning_workspaces.models.OsTypes
+    :param readiness_probe_requirements: The readiness probe requirements.
+    :type readiness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'instance_type': {'key': 'instanceType', 'type': 'str'},
+        'os_type': {'key': 'osType', 'type': 'str'},
+        'readiness_probe_requirements': {'key': 'readinessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # NOTE(review): app_insights_enabled, max_concurrent_requests_per_instance,
+        # max_queue_wait_ms, scoring_timeout_ms and liveness_probe_requirements are not
+        # assigned here -- presumably set by DeploymentConfigurationBase.__init__; confirm.
+        super(ManagedDeploymentConfiguration, self).__init__(**kwargs)
+        # Polymorphic discriminator: always 'Managed' for this subtype.
+        self.compute_type = 'Managed'  # type: str
+        self.instance_type = kwargs.get('instance_type', None)
+        self.os_type = kwargs.get('os_type', None)
+        self.readiness_probe_requirements = kwargs.get('readiness_probe_requirements', None)
+
+
+class ManagedIdentityConfiguration(IdentityConfiguration):
+ """ManagedIdentityConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+ server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not
+ set this field.
+ :type client_id: str
+ :param object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not
+ set this field.
+ :type object_id: str
+ :param msi_resource_id: Specifies a user-assigned identity by resource ID. For system-assigned,
+ do not set this field.
+ :type msi_resource_id: str
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedIdentityConfiguration, self).__init__(**kwargs)
+ self.identity_type = 'Managed' # type: str
+ self.client_id = kwargs.get('client_id', None)
+ self.object_id = kwargs.get('object_id', None)
+ self.msi_resource_id = kwargs.get('msi_resource_id', None)
+
+
+class MedianStoppingPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+ """Defines an early termination policy based on running averages of the primary metric of all runs.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param evaluation_interval:
+ :type evaluation_interval: int
+ :param delay_evaluation:
+ :type delay_evaluation: int
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MedianStoppingPolicyConfiguration, self).__init__(**kwargs)
+ self.policy_type = 'MedianStopping' # type: str
+
+
+class MlAssistConfiguration(msrest.serialization.Model):
+ """Labeling MLAssist configuration definition.
+
+ :param inferencing_compute_binding: AML compute binding used in inferencing.
+ :type inferencing_compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param training_compute_binding: AML compute binding used in training.
+ :type training_compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param ml_assist_enabled: Indicates whether MLAssist feature is enabled.
+ :type ml_assist_enabled: bool
+ """
+
+ _attribute_map = {
+ 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeBinding'},
+ 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeBinding'},
+ 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MlAssistConfiguration, self).__init__(**kwargs)
+ self.inferencing_compute_binding = kwargs.get('inferencing_compute_binding', None)
+ self.training_compute_binding = kwargs.get('training_compute_binding', None)
+ self.ml_assist_enabled = kwargs.get('ml_assist_enabled', None)
+
+
+class Model(msrest.serialization.Model):
+    """An Azure Machine Learning Model.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: The Model Id.
+    :type id: str
+    :param name: Required. The Model name.
+    :type name: str
+    :param framework: The Model framework.
+    :type framework: str
+    :param framework_version: The Model framework version.
+    :type framework_version: str
+    :param version: The Model version assigned by Model Management Service.
+    :type version: long
+    :param datasets: The list of datasets associated with the model.
+    :type datasets: list[~azure_machine_learning_workspaces.models.DatasetReference]
+    :param url: Required. The URL of the Model. Usually a SAS URL.
+    :type url: str
+    :param mime_type: Required. The MIME type of Model content. For more details about MIME type,
+     please open https://www.iana.org/assignments/media-types/media-types.xhtml.
+    :type mime_type: str
+    :param description: The Model description text.
+    :type description: str
+    :param created_time: The Model creation time (UTC).
+    :type created_time: ~datetime.datetime
+    :param modified_time: The Model last modified time (UTC).
+    :type modified_time: ~datetime.datetime
+    :param unpack: Indicates whether we need to unpack the Model during docker Image creation.
+    :type unpack: bool
+    :param parent_model_id: The Parent Model Id.
+    :type parent_model_id: str
+    :param run_id: The RunId that created this model.
+    :type run_id: str
+    :param experiment_name: The name of the experiment where this model was created.
+    :type experiment_name: str
+    :param kv_tags: The Model tag dictionary. Items are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The Model property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param derived_model_ids: Models derived from this model.
+    :type derived_model_ids: list[str]
+    :param sample_input_data: Sample Input Data for the Model. A reference to a dataset in the
+     workspace in the format aml://dataset/{datasetId}.
+    :type sample_input_data: str
+    :param sample_output_data: Sample Output Data for the Model. A reference to a dataset in the
+     workspace in the format aml://dataset/{datasetId}.
+    :type sample_output_data: str
+    :param resource_requirements: Resource requirements for the model.
+    :type resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    """
+
+    # Fields declared required here are accessed via kwargs[...] in __init__.
+    _validation = {
+        'name': {'required': True},
+        'url': {'required': True},
+        'mime_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'framework': {'key': 'framework', 'type': 'str'},
+        'framework_version': {'key': 'frameworkVersion', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'long'},
+        'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+        'url': {'key': 'url', 'type': 'str'},
+        'mime_type': {'key': 'mimeType', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+        'unpack': {'key': 'unpack', 'type': 'bool'},
+        'parent_model_id': {'key': 'parentModelId', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'derived_model_ids': {'key': 'derivedModelIds', 'type': '[str]'},
+        'sample_input_data': {'key': 'sampleInputData', 'type': 'str'},
+        'sample_output_data': {'key': 'sampleOutputData', 'type': 'str'},
+        'resource_requirements': {'key': 'resourceRequirements', 'type': 'ContainerResourceRequirements'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Model, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        # Required: direct indexing raises KeyError when 'name' is missing.
+        self.name = kwargs['name']
+        self.framework = kwargs.get('framework', None)
+        self.framework_version = kwargs.get('framework_version', None)
+        self.version = kwargs.get('version', None)
+        self.datasets = kwargs.get('datasets', None)
+        # Required: direct indexing raises KeyError when 'url' or 'mime_type' is missing.
+        self.url = kwargs['url']
+        self.mime_type = kwargs['mime_type']
+        self.description = kwargs.get('description', None)
+        self.created_time = kwargs.get('created_time', None)
+        self.modified_time = kwargs.get('modified_time', None)
+        self.unpack = kwargs.get('unpack', None)
+        self.parent_model_id = kwargs.get('parent_model_id', None)
+        self.run_id = kwargs.get('run_id', None)
+        self.experiment_name = kwargs.get('experiment_name', None)
+        self.kv_tags = kwargs.get('kv_tags', None)
+        self.properties = kwargs.get('properties', None)
+        self.derived_model_ids = kwargs.get('derived_model_ids', None)
+        self.sample_input_data = kwargs.get('sample_input_data', None)
+        self.sample_output_data = kwargs.get('sample_output_data', None)
+        self.resource_requirements = kwargs.get('resource_requirements', None)
+
+
+class ModelContainerResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar latest_versions: Latest model versions for each stage. Key is the model stage, value is
+ the model version ARM ID.
+ :vartype latest_versions: dict[str, str]
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'latest_versions': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'latest_versions': {'key': 'properties.latestVersions', 'type': '{str}'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelContainerResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.latest_versions = None
+ self.description = kwargs.get('description', None)
+ self.tags = kwargs.get('tags', None)
+ self.properties = kwargs.get('properties', None)
+
+
+class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelContainer entities.
+
+ :param value: An array of objects of type ModelContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelContainerResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ModelContainerResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class ModelDockerSection(msrest.serialization.Model):
+ """ModelDockerSection.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSection, self).__init__(**kwargs)
+ self.base_image = kwargs.get('base_image', None)
+ self.base_dockerfile = kwargs.get('base_dockerfile', None)
+ self.base_image_registry = kwargs.get('base_image_registry', None)
+
+
+class ModelDockerSectionBaseImageRegistry(ContainerRegistry):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionBaseImageRegistry, self).__init__(**kwargs)
+
+
+class ModelDockerSectionResponse(msrest.serialization.Model):
+ """ModelDockerSectionResponse.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponse, self).__init__(**kwargs)
+ self.base_image = kwargs.get('base_image', None)
+ self.base_dockerfile = kwargs.get('base_dockerfile', None)
+ self.base_image_registry = kwargs.get('base_image_registry', None)
+
+
+class ModelDockerSectionResponseBaseImageRegistry(ContainerRegistryResponse):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponseBaseImageRegistry, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionDocker(ModelDockerSection):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionDocker, self).__init__(**kwargs)
+
+
+class ModelPythonSection(msrest.serialization.Model):
+ """ModelPythonSection.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelPythonSection, self).__init__(**kwargs)
+ self.interpreter_path = kwargs.get('interpreter_path', None)
+ self.user_managed_dependencies = kwargs.get('user_managed_dependencies', None)
+ self.conda_dependencies = kwargs.get('conda_dependencies', None)
+ self.base_conda_environment = kwargs.get('base_conda_environment', None)
+
+
+class ModelEnvironmentDefinitionPython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionPython, self).__init__(**kwargs)
+
+
+class RSection(msrest.serialization.Model):
+ """RSection.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+ :param custom_url_packages: The packages from custom urls.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RSection, self).__init__(**kwargs)
+ self.r_version = kwargs.get('r_version', None)
+ self.user_managed = kwargs.get('user_managed', None)
+ self.rscript_path = kwargs.get('rscript_path', None)
+ self.snapshot_date = kwargs.get('snapshot_date', None)
+ self.cran_packages = kwargs.get('cran_packages', None)
+ self.git_hub_packages = kwargs.get('git_hub_packages', None)
+ self.custom_url_packages = kwargs.get('custom_url_packages', None)
+ self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
+
+
+class ModelEnvironmentDefinitionR(RSection):
+ """Settings for a R environment.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+ :param custom_url_packages: The packages from custom urls.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionR, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionResponseDocker(ModelDockerSectionResponse):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseDocker, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionResponsePython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponsePython, self).__init__(**kwargs)
+
+
+class RSectionResponse(msrest.serialization.Model):
+ """RSectionResponse.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+ :param custom_url_packages: The packages from custom urls.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RSectionResponse, self).__init__(**kwargs)
+ self.r_version = kwargs.get('r_version', None)
+ self.user_managed = kwargs.get('user_managed', None)
+ self.rscript_path = kwargs.get('rscript_path', None)
+ self.snapshot_date = kwargs.get('snapshot_date', None)
+ self.cran_packages = kwargs.get('cran_packages', None)
+ self.git_hub_packages = kwargs.get('git_hub_packages', None)
+ self.custom_url_packages = kwargs.get('custom_url_packages', None)
+ self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
+
+
+class ModelEnvironmentDefinitionResponseR(RSectionResponse):
+    """Settings for a R environment.
+
+    :param r_version: The version of R to be installed.
+    :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+    :type user_managed: bool
+    :param rscript_path: The Rscript path to use if an environment build is not required.
+     The path specified gets used to call the user script.
+    :type rscript_path: str
+    :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+    :type snapshot_date: str
+    :param cran_packages: The CRAN packages to use.
+    :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+    :param git_hub_packages: The packages directly from GitHub.
+    :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom urls.
+    :type custom_url_packages: list[str]
+    :param bio_conductor_packages: The packages from Bioconductor.
+    :type bio_conductor_packages: list[str]
+    """
+
+    # Identical to RSectionResponse's map; repeated here so this subtype
+    # serializes independently of the base class definition.
+    _attribute_map = {
+        'r_version': {'key': 'rVersion', 'type': 'str'},
+        'user_managed': {'key': 'userManaged', 'type': 'bool'},
+        'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+        'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+        'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+        'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+        'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+        'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All attribute handling is inherited from RSectionResponse.
+        super(ModelEnvironmentDefinitionResponseR, self).__init__(**kwargs)
+
+
+class ModelSparkSection(msrest.serialization.Model):
+    """ModelSparkSection.
+
+    :param repositories: The list of spark repositories.
+    :type repositories: list[str]
+    :param packages: The Spark packages to use.
+    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+    :param precache_packages: Whether to precache the packages.
+    :type precache_packages: bool
+    """
+
+    # Maps each Python attribute to its camelCase REST wire key and msrest type.
+    _attribute_map = {
+        'repositories': {'key': 'repositories', 'type': '[str]'},
+        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Every field is optional and defaults to None when not supplied.
+        super(ModelSparkSection, self).__init__(**kwargs)
+        self.repositories = kwargs.get('repositories', None)
+        self.packages = kwargs.get('packages', None)
+        self.precache_packages = kwargs.get('precache_packages', None)
+
+
+class ModelEnvironmentDefinitionResponseSpark(ModelSparkSection):
+    """The configuration for a Spark environment.
+
+    :param repositories: The list of spark repositories.
+    :type repositories: list[str]
+    :param packages: The Spark packages to use.
+    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+    :param precache_packages: Whether to precache the packages.
+    :type precache_packages: bool
+    """
+
+    # Same wire mapping as ModelSparkSection; duplicated for this subtype.
+    _attribute_map = {
+        'repositories': {'key': 'repositories', 'type': '[str]'},
+        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Attribute initialization is inherited from ModelSparkSection.
+        super(ModelEnvironmentDefinitionResponseSpark, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionSpark(ModelSparkSection):
+    """The configuration for a Spark environment.
+
+    :param repositories: The list of spark repositories.
+    :type repositories: list[str]
+    :param packages: The Spark packages to use.
+    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+    :param precache_packages: Whether to precache the packages.
+    :type precache_packages: bool
+    """
+
+    # Same wire mapping as ModelSparkSection; duplicated for this subtype.
+    _attribute_map = {
+        'repositories': {'key': 'repositories', 'type': '[str]'},
+        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Attribute initialization is inherited from ModelSparkSection.
+        super(ModelEnvironmentDefinitionSpark, self).__init__(**kwargs)
+
+
+class ModelVersionResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param stage: Model asset stage.
+    :type stage: str
+    :param flavors: Dictionary mapping model flavors to their properties.
+    :type flavors: dict[str, object]
+    :param datastore_id: The asset datastoreId.
+    :type datastore_id: str
+    :param asset_path: DEPRECATED - use
+     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
+    :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
+    :param path: The path of the file/directory.
+    :type path: str
+    :param generated_by: If the name version are system generated (anonymous registration) or user
+     generated. Possible values include: "User", "System".
+    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # Fields marked readonly are server-populated and excluded from request bodies.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Keys containing 'properties.' are flattened from the nested ARM
+    # "properties" object into top-level attributes by msrest.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'stage': {'key': 'properties.stage', 'type': 'str'},
+        'flavors': {'key': 'properties.flavors', 'type': '{object}'},
+        'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
+        'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
+        'path': {'key': 'properties.path', 'type': 'str'},
+        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ModelVersionResource, self).__init__(**kwargs)
+        # Read-only envelope fields: always None locally; filled by the service.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        # Writable properties: optional, default to None.
+        self.stage = kwargs.get('stage', None)
+        self.flavors = kwargs.get('flavors', None)
+        self.datastore_id = kwargs.get('datastore_id', None)
+        self.asset_path = kwargs.get('asset_path', None)
+        self.path = kwargs.get('path', None)
+        self.generated_by = kwargs.get('generated_by', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of ModelVersion entities.
+
+    :param value: An array of objects of type ModelVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.ModelVersionResource]
+    :param next_link: Link to the next page of results; None when this is the last page.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ModelVersionResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class Mpi(DistributionConfiguration):
+    """Mpi.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework.Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param process_count_per_instance:
+    :type process_count_per_instance: int
+    """
+
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Mpi, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed constant identifying this subtype.
+        self.distribution_type = 'Mpi'  # type: str
+        self.process_count_per_instance = kwargs.get('process_count_per_instance', None)
+
+
+class NodeStateCounts(msrest.serialization.Model):
+    """Counts of various compute node states on the amlCompute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar idle_node_count: Number of compute nodes in idle state.
+    :vartype idle_node_count: int
+    :ivar running_node_count: Number of compute nodes which are running jobs.
+    :vartype running_node_count: int
+    :ivar preparing_node_count: Number of compute nodes which are being prepared.
+    :vartype preparing_node_count: int
+    :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+    :vartype unusable_node_count: int
+    :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+    :vartype leaving_node_count: int
+    :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+    :vartype preempted_node_count: int
+    """
+
+    # All counts are server-populated (readonly) and never sent in requests.
+    _validation = {
+        'idle_node_count': {'readonly': True},
+        'running_node_count': {'readonly': True},
+        'preparing_node_count': {'readonly': True},
+        'unusable_node_count': {'readonly': True},
+        'leaving_node_count': {'readonly': True},
+        'preempted_node_count': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+        'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+        'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+        'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+        'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+        'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # No caller-settable fields; everything is filled by the service.
+        super(NodeStateCounts, self).__init__(**kwargs)
+        self.idle_node_count = None
+        self.running_node_count = None
+        self.preparing_node_count = None
+        self.unusable_node_count = None
+        self.leaving_node_count = None
+        self.preempted_node_count = None
+
+
+class NotebookListCredentialsResult(msrest.serialization.Model):
+    """NotebookListCredentialsResult.
+
+    :param primary_access_key: Primary access key for the notebook resource.
+    :type primary_access_key: str
+    :param secondary_access_key: Secondary access key for the notebook resource.
+    :type secondary_access_key: str
+    """
+
+    _attribute_map = {
+        'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+        'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both keys are optional and default to None.
+        super(NotebookListCredentialsResult, self).__init__(**kwargs)
+        self.primary_access_key = kwargs.get('primary_access_key', None)
+        self.secondary_access_key = kwargs.get('secondary_access_key', None)
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+    """NotebookPreparationError.
+
+    :param error_message: Error message describing the notebook preparation failure.
+    :type error_message: str
+    :param status_code: HTTP-style status code associated with the error.
+    :type status_code: int
+    """
+
+    _attribute_map = {
+        'error_message': {'key': 'errorMessage', 'type': 'str'},
+        'status_code': {'key': 'statusCode', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(NotebookPreparationError, self).__init__(**kwargs)
+        self.error_message = kwargs.get('error_message', None)
+        self.status_code = kwargs.get('status_code', None)
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+    """NotebookResourceInfo.
+
+    :param fqdn: Fully qualified domain name of the notebook resource.
+    :type fqdn: str
+    :param resource_id: the data plane resourceId that used to initialize notebook component.
+    :type resource_id: str
+    :param notebook_preparation_error: The error that occurs when preparing notebook.
+    :type notebook_preparation_error:
+     ~azure_machine_learning_workspaces.models.NotebookPreparationError
+    """
+
+    _attribute_map = {
+        'fqdn': {'key': 'fqdn', 'type': 'str'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional and default to None.
+        super(NotebookResourceInfo, self).__init__(**kwargs)
+        self.fqdn = kwargs.get('fqdn', None)
+        self.resource_id = kwargs.get('resource_id', None)
+        self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None)
+
+
+class OnlineDeploymentScaleSettings(msrest.serialization.Model):
+    """OnlineDeploymentScaleSettings.
+
+    :param minimum: Minimum instance count for scaling.
+    :type minimum: int
+    :param maximum: Maximum instance count for scaling.
+    :type maximum: int
+    :param instance_count: Current/desired instance count.
+    :type instance_count: int
+    :param scale_type: Possible values include: "Automatic", "Manual", "None".
+    :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleTypeMode
+    """
+
+    _attribute_map = {
+        'minimum': {'key': 'minimum', 'type': 'int'},
+        'maximum': {'key': 'maximum', 'type': 'int'},
+        'instance_count': {'key': 'instanceCount', 'type': 'int'},
+        'scale_type': {'key': 'scaleType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional and default to None.
+        super(OnlineDeploymentScaleSettings, self).__init__(**kwargs)
+        self.minimum = kwargs.get('minimum', None)
+        self.maximum = kwargs.get('maximum', None)
+        self.instance_count = kwargs.get('instance_count', None)
+        self.scale_type = kwargs.get('scale_type', None)
+
+
+class OnlineDeploymentTrackedResource(msrest.serialization.Model):
+    """OnlineDeploymentTrackedResource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param tags: A set of tags. Dictionary of :code:``.
+    :type tags: dict[str, str]
+    :param location: Required.
+    :type location: str
+    :param kind:
+    :type kind: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param scale_settings:
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineDeploymentScaleSettings
+    :param deployment_configuration: Required.
+    :type deployment_configuration:
+     ~azure_machine_learning_workspaces.models.DeploymentConfigurationBase
+    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
+     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
+    :param description: Description of the endpoint deployment.
+    :type description: str
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :param model_reference: Required.
+    :type model_reference: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+    :param code_configuration: Code configuration for the endpoint deployment.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param environment_id: Environment specification for the endpoint deployment.
+    :type environment_id: str
+    :param environment_variables: Environment variables configuration for the deployment.
+    :type environment_variables: dict[str, str]
+    """
+
+    # 'location' must match the regex pattern; readonly fields are
+    # server-populated; 'required' fields must be supplied by the caller.
+    _validation = {
+        'location': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'deployment_configuration': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'model_reference': {'required': True},
+    }
+
+    # 'properties.' keys are flattened from the nested ARM properties object.
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'OnlineDeploymentScaleSettings'},
+        'deployment_configuration': {'key': 'properties.deploymentConfiguration', 'type': 'DeploymentConfigurationBase'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'model_reference': {'key': 'properties.modelReference', 'type': 'AssetReferenceBase'},
+        'code_configuration': {'key': 'properties.codeConfiguration', 'type': 'CodeConfiguration'},
+        'environment_id': {'key': 'properties.environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'properties.environmentVariables', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(OnlineDeploymentTrackedResource, self).__init__(**kwargs)
+        self.tags = kwargs.get('tags', None)
+        # Required: bracket access raises KeyError if the caller omits it.
+        self.location = kwargs['location']
+        self.kind = kwargs.get('kind', None)
+        self.identity = kwargs.get('identity', None)
+        # Read-only envelope fields filled by the service.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.scale_settings = kwargs.get('scale_settings', None)
+        self.deployment_configuration = kwargs['deployment_configuration']
+        self.provisioning_state = None
+        self.description = kwargs.get('description', None)
+        self.properties = kwargs.get('properties', None)
+        self.model_reference = kwargs['model_reference']
+        self.code_configuration = kwargs.get('code_configuration', None)
+        self.environment_id = kwargs.get('environment_id', None)
+        self.environment_variables = kwargs.get('environment_variables', None)
+
+
+class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of OnlineDeployment entities.
+
+    :param value: An array of objects of type OnlineDeployment.
+    :type value: list[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+    :param next_link: Link to the next page of results; None when this is the last page.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[OnlineDeploymentTrackedResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class OnlineEndpointTrackedResource(msrest.serialization.Model):
+    """OnlineEndpointTrackedResource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param tags: A set of tags. Dictionary of :code:``.
+    :type tags: dict[str, str]
+    :param location: Required.
+    :type location: str
+    :param kind:
+    :type kind: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :ivar provisioning_state: State of endpoint provisioning. Possible values include: "Creating",
+     "Deleting", "Succeeded", "Failed", "Updating", "Canceled".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.EndpointProvisioningState
+    :param description: Description of the inference endpoint.
+    :type description: str
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :param traffic_rules: Traffic rules on how the traffic will be routed across deployments.
+    :type traffic_rules: dict[str, int]
+    :param compute_configuration: Required.
+    :type compute_configuration: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+    :ivar endpoint: Endpoint URI.
+    :vartype endpoint: str
+    :ivar swagger_endpoint: Endpoint Swagger URI.
+    :vartype swagger_endpoint: str
+    :param auth_mode: Required. Inference endpoint authentication mode type. Possible values
+     include: "AMLToken", "Key", "AADToken".
+    :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthModeType
+    """
+
+    # 'location' must match the regex pattern; readonly fields are
+    # server-populated; 'required' fields must be supplied by the caller.
+    _validation = {
+        'location': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+        'compute_configuration': {'required': True},
+        'endpoint': {'readonly': True},
+        'swagger_endpoint': {'readonly': True},
+        'auth_mode': {'required': True},
+    }
+
+    # 'properties.' keys are flattened from the nested ARM properties object.
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'traffic_rules': {'key': 'properties.trafficRules', 'type': '{int}'},
+        'compute_configuration': {'key': 'properties.computeConfiguration', 'type': 'ComputeConfiguration'},
+        'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
+        'swagger_endpoint': {'key': 'properties.swaggerEndpoint', 'type': 'str'},
+        'auth_mode': {'key': 'properties.authMode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(OnlineEndpointTrackedResource, self).__init__(**kwargs)
+        self.tags = kwargs.get('tags', None)
+        # Required: bracket access raises KeyError if the caller omits it.
+        self.location = kwargs['location']
+        self.kind = kwargs.get('kind', None)
+        self.identity = kwargs.get('identity', None)
+        # Read-only fields filled by the service.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.provisioning_state = None
+        self.description = kwargs.get('description', None)
+        self.properties = kwargs.get('properties', None)
+        self.traffic_rules = kwargs.get('traffic_rules', None)
+        self.compute_configuration = kwargs['compute_configuration']
+        self.endpoint = None
+        self.swagger_endpoint = None
+        self.auth_mode = kwargs['auth_mode']
+
+
+class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of OnlineEndpoint entities.
+
+    :param value: An array of objects of type OnlineEndpoint.
+    :type value: list[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
+    :param next_link: Link to the next page of results; None when this is the last page.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[OnlineEndpointTrackedResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class Operation(msrest.serialization.Model):
+    """Azure Machine Learning workspace REST API operation.
+
+    :param name: Operation name: {provider}/{resource}/{operation}.
+    :type name: str
+    :param display: Display name of operation.
+    :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'display': {'key': 'display', 'type': 'OperationDisplay'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(Operation, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.display = kwargs.get('display', None)
+
+
+class OperationDisplay(msrest.serialization.Model):
+    """Display name of operation.
+
+    :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+    :type provider: str
+    :param resource: The resource on which the operation is performed.
+    :type resource: str
+    :param operation: The operation that users can perform.
+    :type operation: str
+    :param description: The description for the operation.
+    :type description: str
+    """
+
+    _attribute_map = {
+        'provider': {'key': 'provider', 'type': 'str'},
+        'resource': {'key': 'resource', 'type': 'str'},
+        'operation': {'key': 'operation', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional and default to None.
+        super(OperationDisplay, self).__init__(**kwargs)
+        self.provider = kwargs.get('provider', None)
+        self.resource = kwargs.get('resource', None)
+        self.operation = kwargs.get('operation', None)
+        self.description = kwargs.get('description', None)
+
+
+class OperationListResult(msrest.serialization.Model):
+    """An array of operations supported by the resource provider.
+
+    :param value: List of AML workspace operations supported by the AML workspace resource
+     provider.
+    :type value: list[~azure_machine_learning_workspaces.models.Operation]
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Operation]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # 'value' is optional and defaults to None.
+        super(OperationListResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+
+
+class OutputData(msrest.serialization.Model):
+    """OutputData.
+
+    :param dataset_name: Output dataset name.
+    :type dataset_name: str
+    :param datastore: Datastore location for output data.
+    :type datastore: str
+    :param datapath: Path location within the datastore for output data.
+    :type datapath: str
+    :param mode: Mode type for data. Possible values include: "Mount", "Download", "Upload".
+    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
+    """
+
+    _attribute_map = {
+        'dataset_name': {'key': 'datasetName', 'type': 'str'},
+        'datastore': {'key': 'datastore', 'type': 'str'},
+        'datapath': {'key': 'datapath', 'type': 'str'},
+        'mode': {'key': 'mode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional and default to None.
+        super(OutputData, self).__init__(**kwargs)
+        self.dataset_name = kwargs.get('dataset_name', None)
+        self.datastore = kwargs.get('datastore', None)
+        self.datapath = kwargs.get('datapath', None)
+        self.mode = kwargs.get('mode', None)
+
+
+class OutputPathAssetReference(AssetReferenceBase):
+    """OutputPathAssetReference.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+     server. Possible values include: "Id", "DataPath", "OutputPath".
+    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+    :param path: Path within the referenced job output.
+    :type path: str
+    :param job_id: Identifier of the job whose output is referenced.
+    :type job_id: str
+    """
+
+    _validation = {
+        'reference_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'job_id': {'key': 'jobId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(OutputPathAssetReference, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed constant identifying this subtype.
+        self.reference_type = 'OutputPath'  # type: str
+        self.path = kwargs.get('path', None)
+        self.job_id = kwargs.get('job_id', None)
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+    """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+    :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+    :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+    :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ComputeResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedServiceList(msrest.serialization.Model):
+    """Paginated list of Machine Learning service objects wrapped in ARM resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+    :vartype value: list[~azure_machine_learning_workspaces.models.ServiceResource]
+    :ivar next_link: A continuation link (absolute URI) to the next page of results in the list.
+    :vartype next_link: str
+    """
+
+    # Unlike the other paginated lists here, both fields are readonly
+    # (server-populated) and cannot be set by the caller.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ServiceResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PaginatedServiceList, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+    """Paginated list of Workspace connection objects.
+
+    :param value: An array of Workspace connection objects.
+    :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+    :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class ParameterSamplingConfiguration(msrest.serialization.Model):
+    """class for all hyperparameter sampling algorithms.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param parameter_space: Required. A dictionary containing each parameter and its distribution.
+     The dictionary key is the name of the parameter.
+    :type parameter_space: object
+    :param sampling_type: Required. Type of the hyperparameter sampling algorithms. Possible values
+     include: "Grid", "Random", "Bayesian".
+    :type sampling_type: str or ~azure_machine_learning_workspaces.models.ParameterSamplingType
+    """
+
+    _validation = {
+        'parameter_space': {'required': True},
+        'sampling_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'parameter_space': {'key': 'parameterSpace', 'type': 'object'},
+        'sampling_type': {'key': 'samplingType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ParameterSamplingConfiguration, self).__init__(**kwargs)
+        # Both fields are required: bracket access raises KeyError if omitted.
+        self.parameter_space = kwargs['parameter_space']
+        self.sampling_type = kwargs['sampling_type']
+
+
+class PartialOnlineDeployment(msrest.serialization.Model):
+    """Mutable online deployment configuration.
+
+    :param scale_settings: Scale settings to apply in a partial (PATCH) update.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineDeploymentScaleSettings
+    :param deployment_configuration: Deployment configuration to apply in a partial (PATCH) update.
+    :type deployment_configuration:
+     ~azure_machine_learning_workspaces.models.DeploymentConfigurationBase
+    """
+
+    # Note: keys here are NOT prefixed with 'properties.' — this model is the
+    # properties payload itself (see PartialOnlineDeploymentPartialTrackedResource).
+    _attribute_map = {
+        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineDeploymentScaleSettings'},
+        'deployment_configuration': {'key': 'deploymentConfiguration', 'type': 'DeploymentConfigurationBase'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional and default to None.
+        super(PartialOnlineDeployment, self).__init__(**kwargs)
+        self.scale_settings = kwargs.get('scale_settings', None)
+        self.deployment_configuration = kwargs.get('deployment_configuration', None)
+
+
+class PartialOnlineDeploymentPartialTrackedResource(msrest.serialization.Model):
+    """PartialOnlineDeploymentPartialTrackedResource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param tags: A set of tags. Dictionary of :code:``.
+    :type tags: dict[str, str]
+    :param location: Resource location.
+    :type location: str
+    :param kind: Resource kind.
+    :type kind: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :param properties: Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineDeployment
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # Readonly envelope fields are server-populated; everything else is
+    # optional, as befits a partial (PATCH) resource.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs)
+        self.tags = kwargs.get('tags', None)
+        self.location = kwargs.get('location', None)
+        self.kind = kwargs.get('kind', None)
+        self.identity = kwargs.get('identity', None)
+        # Read-only fields filled by the service.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.properties = kwargs.get('properties', None)
+        self.system_data = None
+
+
+class PartialOnlineEndpoint(msrest.serialization.Model):
+ """Mutable online endpoint configuration.
+
+ :param traffic_rules: Traffic rules on how the traffic will be routed across deployments.
+ :type traffic_rules: dict[str, int]
+ """
+
+ _attribute_map = {
+ 'traffic_rules': {'key': 'trafficRules', 'type': '{int}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PartialOnlineEndpoint, self).__init__(**kwargs)
+ self.traffic_rules = kwargs.get('traffic_rules', None)
+
+
+class PartialOnlineEndpointPartialTrackedResource(msrest.serialization.Model):
+ """PartialOnlineEndpointPartialTrackedResource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param tags: A set of tags. Dictionary of :code:``.
+ :type tags: dict[str, str]
+ :param location:
+ :type location: str
+ :param kind:
+ :type kind: str
+ :param identity: Service identity associated with a resource.
+ :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :param properties: Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineEndpoint
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.location = kwargs.get('location', None)
+ self.kind = kwargs.get('kind', None)
+ self.identity = kwargs.get('identity', None)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.properties = kwargs.get('properties', None)
+ self.system_data = None
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = kwargs.get('assigned_user', None)
+
+
+class Pipeline(msrest.serialization.Model):
+ """Pipeline.
+
+ :param continue_run_on_step_failure: Flag when set, continue pipeline execution if a step
+ fails.
+ :type continue_run_on_step_failure: bool
+ :param default_datastore_name: Default datastore name shared by all pipeline jobs.
+ :type default_datastore_name: str
+ :param component_jobs: JobDefinition set for PipelineStepJobs.
+ :type component_jobs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJob]
+ :param inputs: Data input set for jobs.
+ :type inputs: dict[str, ~azure_machine_learning_workspaces.models.PipelineInput]
+ :param outputs: Data output set for jobs.
+ :type outputs: dict[str, ~azure_machine_learning_workspaces.models.PipelineOutput]
+ """
+
+ _attribute_map = {
+ 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
+ 'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
+ 'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
+ 'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
+ 'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Pipeline, self).__init__(**kwargs)
+ self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
+ self.default_datastore_name = kwargs.get('default_datastore_name', None)
+ self.component_jobs = kwargs.get('component_jobs', None)
+ self.inputs = kwargs.get('inputs', None)
+ self.outputs = kwargs.get('outputs', None)
+
+
+class PipelineInput(msrest.serialization.Model):
+ """PipelineInput.
+
+ :param data: Input data definition.
+ :type data: ~azure_machine_learning_workspaces.models.InputData
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'InputData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PipelineInput, self).__init__(**kwargs)
+ self.data = kwargs.get('data', None)
+
+
+class PipelineJob(ComputeJobBase):
+ """Pipeline Job definition.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+ values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+ :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+ :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+ "InProgress".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.JobProvisioningState
+ :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+ For local jobs, a job endpoint will have a value of FileStreamObject.
+ :vartype interaction_endpoints:
+ ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+ placed in the "Default" experiment.
+ :type experiment_name: str
+ :param compute_binding: Required. Compute binding for the job.
+ :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :ivar output: Location of the job output logs and artifacts.
+ :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+ :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+ Private preview is only for whitelisted customers.
+ :type priority: int
+ :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+ "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+ "Failed", "Canceled", "NotResponding", "Paused".
+ :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+ :param pipeline_type: Type of PipelineJob. Possible values include: "AzureML".
+ :type pipeline_type: str or ~azure_machine_learning_workspaces.models.PipelineType
+ :param pipeline: Pipeline details.
+ :type pipeline: ~azure_machine_learning_workspaces.models.Pipeline
+ """
+
+ _validation = {
+ 'job_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'interaction_endpoints': {'readonly': True},
+ 'compute_binding': {'required': True},
+ 'output': {'readonly': True},
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'job_type': {'key': 'jobType', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+ 'output': {'key': 'output', 'type': 'JobOutput'},
+ 'priority': {'key': 'priority', 'type': 'int'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
+ 'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PipelineJob, self).__init__(**kwargs)
+ self.job_type = 'Pipeline' # type: str
+ self.status = None
+ self.pipeline_type = kwargs.get('pipeline_type', None)
+ self.pipeline = kwargs.get('pipeline', None)
+
+
+class PipelineOutput(msrest.serialization.Model):
+ """PipelineOutput.
+
+ :param data: Output data definition.
+ :type data: ~azure_machine_learning_workspaces.models.OutputData
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'OutputData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PipelineOutput, self).__init__(**kwargs)
+ self.data = kwargs.get('data', None)
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+
+
+class PrivateEndpointConnection(Resource):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param private_endpoint: The resource of private end point.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.private_endpoint = kwargs.get('private_endpoint', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+ self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :param required_zone_names: The private link resource Private link DNS zone name.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = kwargs.get('required_zone_names', None)
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.description = kwargs.get('description', None)
+ self.actions_required = kwargs.get('actions_required', None)
+
+
+class ProgressMetrics(msrest.serialization.Model):
+ """Progress metrics definition.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar total_datapoint_count: The total datapoint count.
+ :vartype total_datapoint_count: long
+ :ivar completed_datapoint_count: The completed datapoint count.
+ :vartype completed_datapoint_count: long
+ :ivar skipped_datapoint_count: The skipped datapoint count.
+ :vartype skipped_datapoint_count: long
+ :ivar incremental_dataset_last_refresh_time: The time of last successful incremental dataset
+ refresh in UTC.
+ :vartype incremental_dataset_last_refresh_time: ~datetime.datetime
+ """
+
+ _validation = {
+ 'total_datapoint_count': {'readonly': True},
+ 'completed_datapoint_count': {'readonly': True},
+ 'skipped_datapoint_count': {'readonly': True},
+ 'incremental_dataset_last_refresh_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'},
+ 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'},
+ 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'},
+ 'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ProgressMetrics, self).__init__(**kwargs)
+ self.total_datapoint_count = None
+ self.completed_datapoint_count = None
+ self.skipped_datapoint_count = None
+ self.incremental_dataset_last_refresh_time = None
+
+
+class PyTorch(DistributionConfiguration):
+ """PyTorch.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param distribution_type: Required. Specifies the type of distribution framework.Constant
+ filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+ :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+ :param process_count: Total process count for the distributed job.
+ :type process_count: int
+ """
+
+ _validation = {
+ 'distribution_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'distribution_type': {'key': 'distributionType', 'type': 'str'},
+ 'process_count': {'key': 'processCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PyTorch, self).__init__(**kwargs)
+ self.distribution_type = 'PyTorch' # type: str
+ self.process_count = kwargs.get('process_count', None)
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param location: Region of the AML workspace in the id.
+ :type location: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.type = kwargs.get('type', None)
+ self.limit = kwargs.get('limit', None)
+ self.unit = kwargs.get('unit', None)
+ self.location = kwargs.get('location', None)
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class RCranPackage(msrest.serialization.Model):
+ """RCranPackage.
+
+ :param name: The package name.
+ :type name: str
+ :param repository: The repository name.
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RCranPackage, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.repository = kwargs.get('repository', None)
+
+
+class RegenerateEndpointKeysRequest(msrest.serialization.Model):
+ """RegenerateEndpointKeysRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_type: Required. Specification for which type of key to generate. Primary or
+ Secondary. Possible values include: "Primary", "Secondary".
+ :type key_type: str or ~azure_machine_learning_workspaces.models.KeyType
+ :param key_value: The value the key is set to.
+ :type key_value: str
+ """
+
+ _validation = {
+ 'key_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_type': {'key': 'keyType', 'type': 'str'},
+ 'key_value': {'key': 'keyValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RegenerateEndpointKeysRequest, self).__init__(**kwargs)
+ self.key_type = kwargs['key_type']
+ self.key_value = kwargs.get('key_value', None)
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = kwargs.get('passwords', None)
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class ResourceIdentity(msrest.serialization.Model):
+ """Service identity associated with a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param type: Defines values for a ResourceIdentity's type. Possible values include:
+ "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityAssignment
+ :ivar principal_id: Oid that used as the "client_id" when authenticating.
+ :vartype principal_id: str
+ :ivar tenant_id: AAD Tenant where this identity lives.
+ :vartype tenant_id: str
+ :param user_assigned_identities: Dictionary of the user assigned identities, key is ResourceId
+ of the UAI.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentityMeta]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceIdentity, self).__init__(**kwargs)
+ self.type = kwargs.get('type', None)
+ self.principal_id = None
+ self.tenant_id = None
+ self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar location: Region of the AML workspace in the id.
+ :vartype location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.location = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The type of restrictions. As of now only possible value for this is location.
+ :vartype type: str
+ :ivar values: The value of restrictions. If the restriction type is set to location. This would
+ be different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = kwargs.get('reason_code', None)
+
+
+class RGitHubPackage(msrest.serialization.Model):
+ """RGitHubPackage.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ :param auth_token: Personal access token to install from a private repo.
+ :type auth_token: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ 'auth_token': {'key': 'authToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RGitHubPackage, self).__init__(**kwargs)
+ self.repository = kwargs.get('repository', None)
+ self.auth_token = kwargs.get('auth_token', None)
+
+
+class RGitHubPackageResponse(msrest.serialization.Model):
+ """RGitHubPackageResponse.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RGitHubPackageResponse, self).__init__(**kwargs)
+ self.repository = kwargs.get('repository', None)
+
+
+class Route(msrest.serialization.Model):
+ """Route.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param path: Required. The path for the route.
+ :type path: str
+ :param port: Required. The port for the route.
+ :type port: int
+ """
+
+ _validation = {
+ 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port': {'required': True},
+ }
+
+ _attribute_map = {
+ 'path': {'key': 'path', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Route, self).__init__(**kwargs)
+ self.path = kwargs['path']
+ self.port = kwargs['port']
+
+
+class SasSection(msrest.serialization.Model):
+ """SasSection.
+
+ :param sas_token: Storage container SAS token.
+ :type sas_token: str
+ """
+
+ _attribute_map = {
+ 'sas_token': {'key': 'sasToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SasSection, self).__init__(**kwargs)
+ self.sas_token = kwargs.get('sas_token', None)
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+ :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This
+ string needs to be in the RFC Format.
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = kwargs['max_node_count']
+ self.min_node_count = kwargs.get('min_node_count', 0)
+ self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = kwargs.get('script_source', None)
+ self.script_data = kwargs.get('script_data', None)
+ self.script_arguments = kwargs.get('script_arguments', None)
+ self.timeout = kwargs.get('timeout', None)
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ :param creation_script: Script that's run only once during provision of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = kwargs.get('startup_script', None)
+ self.creation_script = kwargs.get('creation_script', None)
+
+
+class ServicePrincipalConfiguration(IdentityConfiguration):
+ """ServicePrincipalConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+ server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param secret: Required.
+ :type secret: str
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ 'secret': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ 'secret': {'key': 'secret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalConfiguration, self).__init__(**kwargs)
+ self.identity_type = 'ServicePrincipal' # type: str
+ self.secret = kwargs['secret']
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = kwargs['client_id']
+ self.client_secret = kwargs['client_secret']
+
+
+class ServicePrincipalSection(msrest.serialization.Model):
+ """ServicePrincipalSection.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authority_url: Authority URL used for authentication.
+ :type authority_url: str
+ :param resource_uri: Resource the service principal has access to.
+ :type resource_uri: str
+ :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+ :type tenant_id: str
+ :param client_id: Required. Service principal client ID.
+ :type client_id: str
+ :param client_secret: Service principal secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'tenant_id': {'required': True},
+ 'client_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+ 'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalSection, self).__init__(**kwargs)
+ self.authority_url = kwargs.get('authority_url', None)
+ self.resource_uri = kwargs.get('resource_uri', None)
+ self.tenant_id = kwargs['tenant_id']
+ self.client_id = kwargs['client_id']
+ self.client_secret = kwargs.get('client_secret', None)
+
+
+class ServiceResource(Resource):
+ """Machine Learning service object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param properties: Service properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ServiceResponseBase
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'properties': {'key': 'properties', 'type': 'ServiceResponseBase'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class ServiceResponseBaseError(ErrorResponse):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResponseBaseError, self).__init__(**kwargs)
+
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = kwargs.get('scripts', None)
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.private_link_resource_id = kwargs.get('private_link_resource_id', None)
+ self.group_id = kwargs.get('group_id', None)
+ self.request_message = kwargs.get('request_message', None)
+ self.status = kwargs.get('status', None)
+
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.tier = kwargs.get('tier', None)
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class SparkMavenPackage(msrest.serialization.Model):
+ """SparkMavenPackage.
+
+ :param group:
+ :type group: str
+ :param artifact:
+ :type artifact: str
+ :param version:
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'group': {'key': 'group', 'type': 'str'},
+ 'artifact': {'key': 'artifact', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SparkMavenPackage, self).__init__(**kwargs)
+ self.group = kwargs.get('group', None)
+ self.artifact = kwargs.get('artifact', None)
+ self.version = kwargs.get('version', None)
+
+
+class SqlAdminSection(msrest.serialization.Model):
+ """SqlAdminSection.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param user_id: Required. SQL database user name.
+ :type user_id: str
+ :param password: SQL database password.
+ :type password: str
+ """
+
+ _validation = {
+ 'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlAdminSection, self).__init__(**kwargs)
+ self.user_id = kwargs['user_id']
+ self.password = kwargs.get('password', None)
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.cert = kwargs.get('cert', None)
+ self.key = kwargs.get('key', None)
+ self.cname = kwargs.get('cname', None)
+
+
+class StatusMessage(msrest.serialization.Model):
+ """Active message associated with project.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar level: Severity level of message. Possible values include: "Error", "Information",
+ "Warning".
+ :vartype level: str or ~azure_machine_learning_workspaces.models.StatusMessageLevel
+ :ivar code: Service-defined message code.
+ :vartype code: str
+ :ivar message: A human-readable representation of the message code.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'level': {'readonly': True},
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'level': {'key': 'level', 'type': 'str'},
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(StatusMessage, self).__init__(**kwargs)
+ self.level = None
+ self.code = None
+ self.message = None
+
+
+class SweepJob(ComputeJobBase):
+ """SweepJob.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+ values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+ :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+ :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+ "InProgress".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.JobProvisioningState
+ :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+ For local jobs, a job endpoint will have a value of FileStreamObject.
+ :vartype interaction_endpoints:
+ ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+ placed in the "Default" experiment.
+ :type experiment_name: str
+ :param compute_binding: Required. Compute binding for the job.
+ :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :ivar output: Location of the job output logs and artifacts.
+ :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+ :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+ Private preview is only for whitelisted customers.
+ :type priority: int
+ :ivar status: The status of a job. Possible values include: "NotStarted", "Starting",
+ "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+ "Failed", "Canceled", "NotResponding", "Paused".
+ :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+ :param parameter_sampling_configuration: Required. class for all hyperparameter sampling
+ algorithms.
+ :type parameter_sampling_configuration:
+ ~azure_machine_learning_workspaces.models.ParameterSamplingConfiguration
+ :param termination_configuration:
+ :type termination_configuration:
+ ~azure_machine_learning_workspaces.models.TerminationConfiguration
+ :param evaluation_configuration: Required.
+ :type evaluation_configuration:
+ ~azure_machine_learning_workspaces.models.EvaluationConfiguration
+ :param trial_component:
+ :type trial_component: ~azure_machine_learning_workspaces.models.TrialComponent
+ :param identity_configuration:
+ :type identity_configuration: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+ """
+
+ _validation = {
+ 'job_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'interaction_endpoints': {'readonly': True},
+ 'compute_binding': {'required': True},
+ 'output': {'readonly': True},
+ 'status': {'readonly': True},
+ 'parameter_sampling_configuration': {'required': True},
+ 'evaluation_configuration': {'required': True},
+ }
+
+ _attribute_map = {
+ 'job_type': {'key': 'jobType', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+ 'output': {'key': 'output', 'type': 'JobOutput'},
+ 'priority': {'key': 'priority', 'type': 'int'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'parameter_sampling_configuration': {'key': 'parameterSamplingConfiguration', 'type': 'ParameterSamplingConfiguration'},
+ 'termination_configuration': {'key': 'terminationConfiguration', 'type': 'TerminationConfiguration'},
+ 'evaluation_configuration': {'key': 'evaluationConfiguration', 'type': 'EvaluationConfiguration'},
+ 'trial_component': {'key': 'trialComponent', 'type': 'TrialComponent'},
+ 'identity_configuration': {'key': 'identityConfiguration', 'type': 'IdentityConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SweepJob, self).__init__(**kwargs)
+ self.job_type = 'Sweep' # type: str
+ self.status = None
+ self.parameter_sampling_configuration = kwargs['parameter_sampling_configuration']
+ self.termination_configuration = kwargs.get('termination_configuration', None)
+ self.evaluation_configuration = kwargs['evaluation_configuration']
+ self.trial_component = kwargs.get('trial_component', None)
+ self.identity_configuration = kwargs.get('identity_configuration', None)
+
+
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = kwargs.get('created_by', None)
+ self.created_by_type = kwargs.get('created_by_type', None)
+ self.created_at = kwargs.get('created_at', None)
+ self.last_modified_by = kwargs.get('last_modified_by', None)
+ self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
+ self.last_modified_at = kwargs.get('last_modified_at', None)
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class TensorFlow(DistributionConfiguration):
+ """TensorFlow.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param distribution_type: Required. Specifies the type of distribution framework.Constant
+ filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+ :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+ :param worker_count: Number of workers. Overwrites the node count in compute binding.
+ :type worker_count: int
+ :param parameter_server_count:
+ :type parameter_server_count: int
+ """
+
+ _validation = {
+ 'distribution_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'distribution_type': {'key': 'distributionType', 'type': 'str'},
+ 'worker_count': {'key': 'workerCount', 'type': 'int'},
+ 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TensorFlow, self).__init__(**kwargs)
+ self.distribution_type = 'TensorFlow' # type: str
+ self.worker_count = kwargs.get('worker_count', None)
+ self.parameter_server_count = kwargs.get('parameter_server_count', None)
+
+
+class TerminationConfiguration(msrest.serialization.Model):
+ """TerminationConfiguration.
+
+ :param max_total_runs:
+ :type max_total_runs: int
+ :param max_concurrent_runs:
+ :type max_concurrent_runs: int
+ :param max_duration_minutes:
+ :type max_duration_minutes: int
+ :param early_termination_policy_configuration: Early termination policies enable canceling
+ poor-performing runs before they complete.
+ :type early_termination_policy_configuration:
+ ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyConfiguration
+ """
+
+ _attribute_map = {
+ 'max_total_runs': {'key': 'maxTotalRuns', 'type': 'int'},
+ 'max_concurrent_runs': {'key': 'maxConcurrentRuns', 'type': 'int'},
+ 'max_duration_minutes': {'key': 'maxDurationMinutes', 'type': 'int'},
+ 'early_termination_policy_configuration': {'key': 'earlyTerminationPolicyConfiguration', 'type': 'EarlyTerminationPolicyConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TerminationConfiguration, self).__init__(**kwargs)
+ self.max_total_runs = kwargs.get('max_total_runs', None)
+ self.max_concurrent_runs = kwargs.get('max_concurrent_runs', None)
+ self.max_duration_minutes = kwargs.get('max_duration_minutes', None)
+ self.early_termination_policy_configuration = kwargs.get('early_termination_policy_configuration', None)
+
+
+class TrainingDataSettings(msrest.serialization.Model):
+ """Dataset datamodel.
+This is the class represents the Dataset Json string structure that passed into Jasmine.
+
+ :param dataset_arm_id: The Dataset Arm Id.
+ :type dataset_arm_id: str
+ :param target_column_name: Label column name.
+ :type target_column_name: str
+ :param weight_column_name: Weight column name.
+ :type weight_column_name: str
+ """
+
+ _attribute_map = {
+ 'dataset_arm_id': {'key': 'datasetArmId', 'type': 'str'},
+ 'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
+ 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TrainingDataSettings, self).__init__(**kwargs)
+ self.dataset_arm_id = kwargs.get('dataset_arm_id', None)
+ self.target_column_name = kwargs.get('target_column_name', None)
+ self.weight_column_name = kwargs.get('weight_column_name', None)
+
+
+class TrainingSettings(msrest.serialization.Model):
+ """Training related configuration.
+
+ :param trial_timeout_in_minutes: Iteration Timeout.
+ :type trial_timeout_in_minutes: int
+ :param block_list_models: List of Algorithms/Models to be blocked for training.
+ :type block_list_models: list[str]
+ :param allow_list_models: List of Algorithms/Models to be Allowed for training.
+ :type allow_list_models: list[str]
+ :param experiment_exit_score: Exit score for the AutoML experiment.
+ :type experiment_exit_score: float
+ :param enable_early_termination: Enable early termination.
+ :type enable_early_termination: bool
+ """
+
+ _attribute_map = {
+ 'trial_timeout_in_minutes': {'key': 'trialTimeoutInMinutes', 'type': 'int'},
+ 'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
+ 'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
+ 'experiment_exit_score': {'key': 'experimentExitScore', 'type': 'float'},
+ 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TrainingSettings, self).__init__(**kwargs)
+ self.trial_timeout_in_minutes = kwargs.get('trial_timeout_in_minutes', None)
+ self.block_list_models = kwargs.get('block_list_models', None)
+ self.allow_list_models = kwargs.get('allow_list_models', None)
+ self.experiment_exit_score = kwargs.get('experiment_exit_score', None)
+ self.enable_early_termination = kwargs.get('enable_early_termination', None)
+
+
+class TrialComponent(msrest.serialization.Model):
+ """TrialComponent.
+
+ :param code_configuration: Code configuration of the job.
+ :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+ :param environment_id: Environment id of the job.
+ :type environment_id: str
+ :param data_bindings: Mapping of data bindings used in the job.
+ :type data_bindings: dict[str, ~azure_machine_learning_workspaces.models.DataBinding]
+ :param environment_variables: Environment variables included in the job.
+ :type environment_variables: dict[str, str]
+ :param distribution_configuration:
+ :type distribution_configuration:
+ ~azure_machine_learning_workspaces.models.DistributionConfiguration
+ """
+
+ _attribute_map = {
+ 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+ 'environment_id': {'key': 'environmentId', 'type': 'str'},
+ 'data_bindings': {'key': 'dataBindings', 'type': '{DataBinding}'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'distribution_configuration': {'key': 'distributionConfiguration', 'type': 'DistributionConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TrialComponent, self).__init__(**kwargs)
+ self.code_configuration = kwargs.get('code_configuration', None)
+ self.environment_id = kwargs.get('environment_id', None)
+ self.data_bindings = kwargs.get('data_bindings', None)
+ self.environment_variables = kwargs.get('environment_variables', None)
+ self.distribution_configuration = kwargs.get('distribution_configuration', None)
+
+
+class TruncationSelectionPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+ """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param evaluation_interval:
+ :type evaluation_interval: int
+ :param delay_evaluation:
+ :type delay_evaluation: int
+ :param truncation_percentage:
+ :type truncation_percentage: int
+ :param exclude_finished_jobs:
+ :type exclude_finished_jobs: bool
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
+ 'exclude_finished_jobs': {'key': 'excludeFinishedJobs', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TruncationSelectionPolicyConfiguration, self).__init__(**kwargs)
+ self.policy_type = 'TruncationSelection' # type: str
+ self.truncation_percentage = kwargs.get('truncation_percentage', None)
+ self.exclude_finished_jobs = kwargs.get('exclude_finished_jobs', None)
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = kwargs.get('limit', None)
+ self.unit = None
+ self.status = kwargs.get('status', None)
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+    """Settings for the user account that gets created on each of the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = kwargs['admin_user_name']
+ self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+ self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId(aka appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class UserAssignedIdentityMeta(msrest.serialization.Model):
+ """User assigned identities associated with a resource.
+
+ :param principal_id: the object ID of the service principal object for your managed identity
+ that is used to grant role-based access to an Azure resource.
+ :type principal_id: str
+ :param client_id: aka appId, a unique identifier generated by Azure AD that is tied to an
+ application and service principal during its initial provisioning.
+ :type client_id: str
+ """
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentityMeta, self).__init__(**kwargs)
+ self.principal_id = kwargs.get('principal_id', None)
+ self.client_id = kwargs.get('client_id', None)
+
+
+class ValidationDataSettings(msrest.serialization.Model):
+ """ValidationDataSettings.
+
+    :param dataset_arm_id: Dataset ARM id.
+ :type dataset_arm_id: str
+ :param n_cross_validations: Number of cross validation folds to be applied on training dataset
+ when validation dataset is not provided.
+ :type n_cross_validations: int
+ :param validation_size: The fraction of training dataset that needs to be set aside for
+ validation purpose.
+ Values between (0.0 , 1.0)
+ Applied when validation dataset is not provided.
+ :type validation_size: float
+ """
+
+ _attribute_map = {
+ 'dataset_arm_id': {'key': 'datasetArmId', 'type': 'str'},
+ 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'int'},
+ 'validation_size': {'key': 'validationSize', 'type': 'float'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ValidationDataSettings, self).__init__(**kwargs)
+ self.dataset_arm_id = kwargs.get('dataset_arm_id', None)
+ self.n_cross_validations = kwargs.get('n_cross_validations', None)
+ self.validation_size = kwargs.get('validation_size', None)
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on Azure Virtual Machines.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of gPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None)
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.aml_compute = kwargs.get('aml_compute', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.public_key_data = kwargs.get('public_key_data', None)
+ self.private_key_data = kwargs.get('private_key_data', None)
+
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+    :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format.
+ :vartype creation_time: ~datetime.datetime
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ workspace RP in customer subscription if the workspace is CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'creation_time': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.workspace_id = None
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.creation_time = None
+ self.key_vault = kwargs.get('key_vault', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.discovery_url = kwargs.get('discovery_url', None)
+ self.provisioning_state = None
+ self.encryption = kwargs.get('encryption', None)
+ self.hbi_workspace = kwargs.get('hbi_workspace', False)
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False)
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None)
+
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+
+
+class WorkspaceConnectionDto(msrest.serialization.Model):
+    """Object used for creating a workspace connection.
+
+ :param name: Friendly name of the workspace connection.
+ :type name: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnectionDto, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations that the SKU is available. This will be supported and
+ registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = kwargs.get('restrictions', None)
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
new file mode 100644
index 00000000000..eb28b3b6cda
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
@@ -0,0 +1,13558 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
+class AccountKeySection(msrest.serialization.Model):
+ """AccountKeySection.
+
+ :param key: Storage account key.
+ :type key: str
+ """
+
+ _attribute_map = {
+ 'key': {'key': 'key', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ key: Optional[str] = None,
+ **kwargs
+ ):
+ super(AccountKeySection, self).__init__(**kwargs)
+ self.key = key
+
+
+class CreateServiceRequest(msrest.serialization.Model):
+    """The base class for creating a service.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AciServiceCreateRequest, CreateEndpointVariantRequest.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The description of the service.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service properties dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param keys: The authentication keys.
+    :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param environment_image_request: The Environment, models and assets needed for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+    :param location: The name of the Azure location/region.
+    :type location: str
+    """
+
+    # msrest validation rules: compute_type is the required polymorphic discriminator.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps python attribute -> REST wire key / msrest type string.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'keys': {'key': 'keys', 'type': 'AuthKeys'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    # Discriminator value -> concrete subclass chosen during deserialization.
+    # NOTE(review): the docstring documents values "ACI"/"AKS" but the generated map
+    # keys 'Custom' to CreateEndpointVariantRequest — confirm against the service swagger
+    # before relying on AKS-variant deserialization through this base.
+    _subtype_map = {
+        'compute_type': {'ACI': 'AciServiceCreateRequest', 'Custom': 'CreateEndpointVariantRequest'}
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        keys: Optional["AuthKeys"] = None,
+        environment_image_request: Optional["EnvironmentImageRequest"] = None,
+        location: Optional[str] = None,
+        **kwargs
+    ):
+        super(CreateServiceRequest, self).__init__(**kwargs)
+        self.description = description
+        self.kv_tags = kv_tags
+        self.properties = properties
+        self.keys = keys
+        # Left None on the base; each subclass overwrites with its fixed discriminator.
+        self.compute_type = None  # type: Optional[str]
+        self.environment_image_request = environment_image_request
+        self.location = location
+
+
+class AciServiceCreateRequest(CreateServiceRequest):
+    """Request payload for creating a service hosted on Azure Container Instances (ACI).
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The description of the service.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service properties dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param keys: The authentication keys.
+    :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param environment_image_request: The Environment, models and assets needed for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+    :param location: The name of the Azure location/region.
+    :type location: str
+    :param container_resource_requirements: The container resource requirements.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :param auth_enabled: Whether or not authentication is enabled on the service.
+    :type auth_enabled: bool
+    :param ssl_enabled: Whether or not SSL is enabled.
+    :type ssl_enabled: bool
+    :param app_insights_enabled: Whether or not Application Insights is enabled.
+    :type app_insights_enabled: bool
+    :param data_collection: Details of the data collection options specified.
+    :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+    :type ssl_certificate: str
+    :param ssl_key: The public SSL key in PEM format for the certificate.
+    :type ssl_key: str
+    :param cname: The CName for the service.
+    :type cname: str
+    :param dns_name_label: The Dns label for the service.
+    :type dns_name_label: str
+    :param vnet_configuration: The virtual network configuration.
+    :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+    :param encryption_properties: The encryption properties.
+    :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps python attribute -> REST wire key / msrest type string (base fields repeated
+    # because msrest resolves the map per concrete class).
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'keys': {'key': 'keys', 'type': 'AuthKeys'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+        'location': {'key': 'location', 'type': 'str'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+        'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+        'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+        'ssl_key': {'key': 'sslKey', 'type': 'str'},
+        'cname': {'key': 'cname', 'type': 'str'},
+        'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
+        'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+        'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        keys: Optional["AuthKeys"] = None,
+        environment_image_request: Optional["EnvironmentImageRequest"] = None,
+        location: Optional[str] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        auth_enabled: Optional[bool] = False,
+        ssl_enabled: Optional[bool] = False,
+        app_insights_enabled: Optional[bool] = False,
+        data_collection: Optional["ModelDataCollection"] = None,
+        ssl_certificate: Optional[str] = None,
+        ssl_key: Optional[str] = None,
+        cname: Optional[str] = None,
+        dns_name_label: Optional[str] = None,
+        vnet_configuration: Optional["VnetConfiguration"] = None,
+        encryption_properties: Optional["EncryptionProperties"] = None,
+        **kwargs
+    ):
+        # Base fields are forwarded by keyword; the discriminator is then pinned below.
+        super(AciServiceCreateRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, **kwargs)
+        # Fixed discriminator value for this subtype (see CreateServiceRequest._subtype_map).
+        self.compute_type = 'ACI'  # type: str
+        self.container_resource_requirements = container_resource_requirements
+        self.auth_enabled = auth_enabled
+        self.ssl_enabled = ssl_enabled
+        self.app_insights_enabled = app_insights_enabled
+        self.data_collection = data_collection
+        self.ssl_certificate = ssl_certificate
+        self.ssl_key = ssl_key
+        self.cname = cname
+        self.dns_name_label = dns_name_label
+        self.vnet_configuration = vnet_configuration
+        self.encryption_properties = encryption_properties
+
+
+class ModelDataCollection(msrest.serialization.Model):
+ """The Model data collection properties.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ModelDataCollection, self).__init__(**kwargs)
+ self.event_hub_enabled = event_hub_enabled
+ self.storage_enabled = storage_enabled
+
+
+class AciServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class EncryptionProperties(msrest.serialization.Model):
+ """EncryptionProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(EncryptionProperties, self).__init__(**kwargs)
+ self.vault_base_url = vault_base_url
+ self.key_name = key_name
+ self.key_version = key_version
+
+
+class AciServiceCreateRequestEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestEncryptionProperties, self).__init__(vault_base_url=vault_base_url, key_name=key_name, key_version=key_version, **kwargs)
+
+
+class VnetConfiguration(msrest.serialization.Model):
+ """VnetConfiguration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(VnetConfiguration, self).__init__(**kwargs)
+ self.vnet_name = vnet_name
+ self.subnet_name = subnet_name
+
+
+class AciServiceCreateRequestVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestVnetConfiguration, self).__init__(vnet_name=vnet_name, subnet_name=subnet_name, **kwargs)
+
+
+class ServiceResponseBase(msrest.serialization.Model):
+    """The base service response. The correct inherited response based on computeType will be returned (ex. ACIServiceResponse).
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AciServiceResponse, AksVariantResponse.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    """
+
+    # state/error are server-populated (readonly); compute_type is the required discriminator.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+    }
+
+    # Maps python attribute -> REST wire key / msrest type string.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+    }
+
+    # Discriminator value -> concrete subclass chosen during deserialization.
+    # NOTE(review): the docstring documents values "ACI"/"AKS" but the generated map
+    # keys 'Custom' to AksVariantResponse — confirm against the service swagger.
+    _subtype_map = {
+        'compute_type': {'ACI': 'AciServiceResponse', 'Custom': 'AksVariantResponse'}
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+        **kwargs
+    ):
+        super(ServiceResponseBase, self).__init__(**kwargs)
+        self.description = description
+        self.kv_tags = kv_tags
+        self.properties = properties
+        # Readonly fields start as None and are filled in when deserializing a response.
+        self.state = None
+        self.error = None
+        # Left None on the base; each subclass overwrites with its fixed discriminator.
+        self.compute_type = None  # type: Optional[str]
+        self.deployment_type = deployment_type
+
+
+class AciServiceResponse(ServiceResponseBase):
+    """The response for an ACI service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    :param container_resource_requirements: The container resource requirements.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :ivar scoring_uri: The Uri for sending scoring requests.
+    :vartype scoring_uri: str
+    :param location: The name of the Azure location/region.
+    :type location: str
+    :param auth_enabled: Whether or not authentication is enabled on the service.
+    :type auth_enabled: bool
+    :param ssl_enabled: Whether or not SSL is enabled.
+    :type ssl_enabled: bool
+    :param app_insights_enabled: Whether or not Application Insights is enabled.
+    :type app_insights_enabled: bool
+    :param data_collection: Details of the data collection options specified.
+    :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+    :type ssl_certificate: str
+    :param ssl_key: The public SSL key in PEM format for the certificate.
+    :type ssl_key: str
+    :param cname: The CName for the service.
+    :type cname: str
+    :param public_ip: The public IP address for the service.
+    :type public_ip: str
+    :param public_fqdn: The public Fqdn for the service.
+    :type public_fqdn: str
+    :ivar swagger_uri: The Uri for sending swagger requests.
+    :vartype swagger_uri: str
+    :ivar model_config_map: Details on the models and configurations.
+    :vartype model_config_map: dict[str, object]
+    :param models: The list of models.
+    :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param environment_image_request: The Environment, models and assets used for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+    :param vnet_configuration: The virtual network configuration.
+    :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+    :param encryption_properties: The encryption properties.
+    :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+    """
+
+    # readonly entries are populated only by the server and ignored on requests.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+        'scoring_uri': {'readonly': True},
+        'swagger_uri': {'readonly': True},
+        'model_config_map': {'readonly': True},
+    }
+
+    # Maps python attribute -> REST wire key / msrest type string.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+        'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+        'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+        'ssl_key': {'key': 'sslKey', 'type': 'str'},
+        'cname': {'key': 'cname', 'type': 'str'},
+        'public_ip': {'key': 'publicIp', 'type': 'str'},
+        'public_fqdn': {'key': 'publicFqdn', 'type': 'str'},
+        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+        'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+        'models': {'key': 'models', 'type': '[Model]'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+        'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+        'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        location: Optional[str] = None,
+        auth_enabled: Optional[bool] = None,
+        ssl_enabled: Optional[bool] = None,
+        app_insights_enabled: Optional[bool] = None,
+        data_collection: Optional["ModelDataCollection"] = None,
+        ssl_certificate: Optional[str] = None,
+        ssl_key: Optional[str] = None,
+        cname: Optional[str] = None,
+        public_ip: Optional[str] = None,
+        public_fqdn: Optional[str] = None,
+        models: Optional[List["Model"]] = None,
+        environment_image_request: Optional["EnvironmentImageResponse"] = None,
+        vnet_configuration: Optional["VnetConfiguration"] = None,
+        encryption_properties: Optional["EncryptionProperties"] = None,
+        **kwargs
+    ):
+        super(AciServiceResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, **kwargs)
+        # Fixed discriminator value for this subtype (see ServiceResponseBase._subtype_map).
+        self.compute_type = 'ACI'  # type: str
+        self.container_resource_requirements = container_resource_requirements
+        # Readonly fields start as None and are filled in when deserializing a response.
+        self.scoring_uri = None
+        self.location = location
+        self.auth_enabled = auth_enabled
+        self.ssl_enabled = ssl_enabled
+        self.app_insights_enabled = app_insights_enabled
+        self.data_collection = data_collection
+        self.ssl_certificate = ssl_certificate
+        self.ssl_key = ssl_key
+        self.cname = cname
+        self.public_ip = public_ip
+        self.public_fqdn = public_fqdn
+        self.swagger_uri = None
+        self.model_config_map = None
+        self.models = models
+        self.environment_image_request = environment_image_request
+        self.vnet_configuration = vnet_configuration
+        self.encryption_properties = encryption_properties
+
+
+class AciServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class AciServiceResponseEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(AciServiceResponseEncryptionProperties, self).__init__(vault_base_url=vault_base_url, key_name=key_name, key_version=key_version, **kwargs)
+
+
+class EnvironmentImageResponse(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageResponse, self).__init__(**kwargs)
+ self.driver_program = driver_program
+ self.assets = assets
+ self.model_ids = model_ids
+ self.models = models
+ self.environment = environment
+ self.environment_reference = environment_reference
+
+
+class AciServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseEnvironmentImageRequest, self).__init__(driver_program=driver_program, assets=assets, model_ids=model_ids, models=models, environment=environment, environment_reference=environment_reference, **kwargs)
+
+
+class AciServiceResponseVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseVnetConfiguration, self).__init__(vnet_name=vnet_name, subnet_name=subnet_name, **kwargs)
+
+
+class Compute(msrest.serialization.Model):
+    """Machine Learning compute object.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    """
+
+    # readonly entries are populated only by the server; compute_type is the discriminator.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # Maps python attribute -> REST wire key / msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+    }
+
+    # Discriminator value -> concrete subclass chosen during deserialization.
+    _subtype_map = {
+        'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'}
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_location: Optional[str] = None,
+        description: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(Compute, self).__init__(**kwargs)
+        # Left None on the base; each subclass overwrites with its fixed discriminator.
+        self.compute_type = None  # type: Optional[str]
+        self.compute_location = compute_location
+        # Readonly fields start as None and are filled in when deserializing a response.
+        self.provisioning_state = None
+        self.description = description
+        self.created_on = None
+        self.modified_on = None
+        self.resource_id = resource_id
+        self.provisioning_errors = None
+        self.is_attached_compute = None
+
+
+class Aks(Compute):
+    """A Machine Learning compute based on AKS.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param properties: AKS properties.
+    :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+    """
+
+    # Consumed reflectively by the msrest serializer; 'readonly' attributes are
+    # server-populated and are never sent in a request payload.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_location: Optional[str] = None,
+        description: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        properties: Optional["AksProperties"] = None,
+        **kwargs
+    ):
+        super(Aks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+        # Fixed polymorphic discriminator for this subtype (see Compute._subtype_map).
+        self.compute_type = 'AKS'  # type: str
+        self.properties = properties
+
+
+class ComputeConfiguration(msrest.serialization.Model):
+    """Base configuration describing the compute an endpoint runs on.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksComputeConfiguration, AzureMlComputeConfiguration, ManagedComputeConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+    }
+
+    # Discriminator value -> concrete model class; msrest uses this to pick the
+    # right subtype during polymorphic deserialization.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksComputeConfiguration', 'AzureMLCompute': 'AzureMlComputeConfiguration', 'Managed': 'ManagedComputeConfiguration'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeConfiguration, self).__init__(**kwargs)
+        # No value on the base class; subtypes overwrite this with their discriminator.
+        self.compute_type = None  # type: Optional[str]
+
+
+class AksComputeConfiguration(ComputeConfiguration):
+    """Endpoint compute configuration targeting an attached AKS cluster.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param namespace: Kubernetes namespace to deploy into.
+    :type namespace: str
+    :param compute_name: Required. Name of the attached AKS compute resource.
+    :type compute_name: str
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    # NOTE(review): the compute_name pattern is unanchored and has no quantifier,
+    # so msrest only validates that the first character is alphanumeric/underscore —
+    # presumably the swagger intended r'^[a-zA-Z0-9_]+$'; confirm against the service spec.
+    _validation = {
+        'compute_type': {'required': True},
+        'compute_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'compute_name': {'key': 'computeName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_name: str,
+        namespace: Optional[str] = None,
+        **kwargs
+    ):
+        super(AksComputeConfiguration, self).__init__(**kwargs)
+        # Fixed polymorphic discriminator for this subtype.
+        self.compute_type = 'AKS'  # type: str
+        self.namespace = namespace
+        self.compute_name = compute_name
+
+
+class ComputeSecrets(msrest.serialization.Model):
+    """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+    }
+
+    # Discriminator value -> concrete model class for polymorphic deserialization.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeSecrets, self).__init__(**kwargs)
+        # No value on the base class; subtypes overwrite this with their discriminator.
+        self.compute_type = None  # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on AKS.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+     Kubernetes cluster.
+    :type user_kube_config: str
+    :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+     Kubernetes cluster.
+    :type admin_kube_config: str
+    :param image_pull_secret_name: Image registry pull secret.
+    :type image_pull_secret_name: str
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+        'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+        'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        user_kube_config: Optional[str] = None,
+        admin_kube_config: Optional[str] = None,
+        image_pull_secret_name: Optional[str] = None,
+        **kwargs
+    ):
+        super(AksComputeSecrets, self).__init__(**kwargs)
+        # Fixed polymorphic discriminator for this subtype.
+        self.compute_type = 'AKS'  # type: str
+        self.user_kube_config = user_kube_config
+        self.admin_kube_config = admin_kube_config
+        self.image_pull_secret_name = image_pull_secret_name
+
+
+class DeploymentConfigurationBase(msrest.serialization.Model):
+    """Base deployment configuration shared by all endpoint compute types.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksDeploymentConfiguration, ManagedDeploymentConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param app_insights_enabled: Whether Application Insights telemetry is enabled.
+    :type app_insights_enabled: bool
+    :param max_concurrent_requests_per_instance: Maximum concurrent requests per instance.
+    :type max_concurrent_requests_per_instance: int
+    :param max_queue_wait_ms: Maximum time (ms) a request may wait in the queue.
+    :type max_queue_wait_ms: int
+    :param scoring_timeout_ms: Scoring timeout in milliseconds.
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+    }
+
+    # Discriminator value -> concrete model class for polymorphic deserialization.
+    # Note there is no 'AzureMLCompute' entry here, unlike ComputeConfiguration.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksDeploymentConfiguration', 'Managed': 'ManagedDeploymentConfiguration'}
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        max_concurrent_requests_per_instance: Optional[int] = None,
+        max_queue_wait_ms: Optional[int] = None,
+        scoring_timeout_ms: Optional[int] = None,
+        liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        **kwargs
+    ):
+        super(DeploymentConfigurationBase, self).__init__(**kwargs)
+        # No value on the base class; subtypes overwrite this with their discriminator.
+        self.compute_type = None  # type: Optional[str]
+        self.app_insights_enabled = app_insights_enabled
+        self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance
+        self.max_queue_wait_ms = max_queue_wait_ms
+        self.scoring_timeout_ms = scoring_timeout_ms
+        self.liveness_probe_requirements = liveness_probe_requirements
+
+
+class AksDeploymentConfiguration(DeploymentConfigurationBase):
+    """Deployment configuration for an endpoint backed by AKS.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param app_insights_enabled: Whether Application Insights telemetry is enabled.
+    :type app_insights_enabled: bool
+    :param max_concurrent_requests_per_instance: Maximum concurrent requests per instance.
+    :type max_concurrent_requests_per_instance: int
+    :param max_queue_wait_ms: Maximum time (ms) a request may wait in the queue.
+    :type max_queue_wait_ms: int
+    :param scoring_timeout_ms: Scoring timeout in milliseconds.
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param container_resource_requirements: The resource requirements for the container (cpu and
+     memory).
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :param model_data_collection: The Model data collection properties.
+    :type model_data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'model_data_collection': {'key': 'modelDataCollection', 'type': 'ModelDataCollection'},
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        max_concurrent_requests_per_instance: Optional[int] = None,
+        max_queue_wait_ms: Optional[int] = None,
+        scoring_timeout_ms: Optional[int] = None,
+        liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        model_data_collection: Optional["ModelDataCollection"] = None,
+        **kwargs
+    ):
+        super(AksDeploymentConfiguration, self).__init__(app_insights_enabled=app_insights_enabled, max_concurrent_requests_per_instance=max_concurrent_requests_per_instance, max_queue_wait_ms=max_queue_wait_ms, scoring_timeout_ms=scoring_timeout_ms, liveness_probe_requirements=liveness_probe_requirements, **kwargs)
+        # Fixed polymorphic discriminator for this subtype.
+        self.compute_type = 'AKS'  # type: str
+        self.container_resource_requirements = container_resource_requirements
+        self.model_data_collection = model_data_collection
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+    """Advance configuration for AKS networking.
+
+    :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+    :type subnet_id: str
+    :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+     not overlap with any Subnet IP ranges.
+    :type service_cidr: str
+    :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+     the Kubernetes service address range specified in serviceCidr.
+    :type dns_service_ip: str
+    :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+     must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+    :type docker_bridge_cidr: str
+    """
+
+    # Consumed reflectively by the msrest serializer. The CIDR fields accept an
+    # IPv4 address with an optional /0-/32 prefix; dns_service_ip must be a bare
+    # IPv4 address (each octet limited to 0-255 by the pattern).
+    _validation = {
+        'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+        'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+        'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'subnet_id': {'key': 'subnetId', 'type': 'str'},
+        'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+        'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+        'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        subnet_id: Optional[str] = None,
+        service_cidr: Optional[str] = None,
+        dns_service_ip: Optional[str] = None,
+        docker_bridge_cidr: Optional[str] = None,
+        **kwargs
+    ):
+        super(AksNetworkingConfiguration, self).__init__(**kwargs)
+        self.subnet_id = subnet_id
+        self.service_cidr = service_cidr
+        self.dns_service_ip = dns_service_ip
+        self.docker_bridge_cidr = docker_bridge_cidr
+
+
+class AksProperties(msrest.serialization.Model):
+    """AKS properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param cluster_fqdn: Cluster full qualified domain name.
+    :type cluster_fqdn: str
+    :ivar system_services: System services.
+    :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+    :param agent_count: Number of agents. Must be at least 1.
+    :type agent_count: int
+    :param agent_vm_size: Agent virtual machine size.
+    :type agent_vm_size: str
+    :param ssl_configuration: SSL configuration.
+    :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+    :param aks_networking_configuration: AKS networking configuration for vnet.
+    :type aks_networking_configuration:
+     ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+    """
+
+    # Consumed reflectively by the msrest serializer; system_services is
+    # server-populated and never sent in a request payload.
+    _validation = {
+        'system_services': {'readonly': True},
+        'agent_count': {'minimum': 1},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+        'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+        'agent_count': {'key': 'agentCount', 'type': 'int'},
+        'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+        'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+        'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        cluster_fqdn: Optional[str] = None,
+        agent_count: Optional[int] = None,
+        agent_vm_size: Optional[str] = None,
+        ssl_configuration: Optional["SslConfiguration"] = None,
+        aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None,
+        **kwargs
+    ):
+        super(AksProperties, self).__init__(**kwargs)
+        self.cluster_fqdn = cluster_fqdn
+        # Read-only; filled in by the service on responses.
+        self.system_services = None
+        self.agent_count = agent_count
+        self.agent_vm_size = agent_vm_size
+        self.ssl_configuration = ssl_configuration
+        self.aks_networking_configuration = aks_networking_configuration
+
+
+class AksReplicaStatus(msrest.serialization.Model):
+    """Replica counts and error state for an AKS-hosted service.
+
+    :param desired_replicas: The desired number of replicas.
+    :type desired_replicas: int
+    :param updated_replicas: The number of updated replicas.
+    :type updated_replicas: int
+    :param available_replicas: The number of available replicas.
+    :type available_replicas: int
+    :param error: The error details.
+    :type error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    """
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+        'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+        'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        desired_replicas: Optional[int] = None,
+        updated_replicas: Optional[int] = None,
+        available_replicas: Optional[int] = None,
+        error: Optional["ErrorResponse"] = None,
+        **kwargs
+    ):
+        super(AksReplicaStatus, self).__init__(**kwargs)
+        self.desired_replicas = desired_replicas
+        self.updated_replicas = updated_replicas
+        self.available_replicas = available_replicas
+        self.error = error
+
+
+class ErrorResponse(msrest.serialization.Model):
+    """Error response information.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar code: Error code.
+    :vartype code: str
+    :ivar message: Error message.
+    :vartype message: str
+    :ivar details: An array of error detail objects.
+    :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+    """
+
+    # All fields are server-populated; msrest never serializes them outbound.
+    _validation = {
+        'code': {'readonly': True},
+        'message': {'readonly': True},
+        'details': {'readonly': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+        'details': {'key': 'details', 'type': '[ErrorDetail]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ErrorResponse, self).__init__(**kwargs)
+        # Read-only; filled in by the service when deserializing a response.
+        self.code = None
+        self.message = None
+        self.details = None
+
+
+class AksReplicaStatusError(ErrorResponse):
+    """The error details.
+
+    A named alias of :class:`ErrorResponse` generated for the AksReplicaStatus.error
+    property; it adds no fields of its own.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar code: Error code.
+    :vartype code: str
+    :ivar message: Error message.
+    :vartype message: str
+    :ivar details: An array of error detail objects.
+    :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+    """
+
+    # All fields are server-populated; msrest never serializes them outbound.
+    _validation = {
+        'code': {'readonly': True},
+        'message': {'readonly': True},
+        'details': {'readonly': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+        'details': {'key': 'details', 'type': '[ErrorDetail]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AksReplicaStatusError, self).__init__(**kwargs)
+
+
+class CreateEndpointVariantRequest(CreateServiceRequest):
+    """The Variant properties.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksServiceCreateRequest.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The description of the service.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service properties dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param keys: The authentication keys.
+    :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param environment_image_request: The Environment, models and assets needed for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+    :param location: The name of the Azure location/region.
+    :type location: str
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'keys': {'key': 'keys', 'type': 'AuthKeys'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+        'location': {'key': 'location', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    # Discriminator value -> concrete model class for polymorphic deserialization.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksServiceCreateRequest'}
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        keys: Optional["AuthKeys"] = None,
+        environment_image_request: Optional["EnvironmentImageRequest"] = None,
+        location: Optional[str] = None,
+        is_default: Optional[bool] = None,
+        traffic_percentile: Optional[float] = None,
+        type: Optional[Union[str, "VariantType"]] = None,
+        **kwargs
+    ):
+        super(CreateEndpointVariantRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, **kwargs)
+        # NOTE(review): 'Custom' is neither in the documented enum ("ACI", "AKS") nor
+        # in this class's _subtype_map — presumably the swagger x-ms-discriminator-value
+        # for this intermediate type; confirm against the service specification.
+        self.compute_type = 'Custom'  # type: str
+        self.is_default = is_default
+        self.traffic_percentile = traffic_percentile
+        self.type = type
+
+
+class AksServiceCreateRequest(CreateEndpointVariantRequest):
+    """The request to create an AKS service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The description of the service.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service properties dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param keys: The authentication keys.
+    :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param environment_image_request: The Environment, models and assets needed for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+    :param location: The name of the Azure location/region.
+    :type location: str
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    :param num_replicas: The number of replicas on the cluster.
+    :type num_replicas: int
+    :param data_collection: Details of the data collection options specified.
+    :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    :param compute_name: The name of the compute resource.
+    :type compute_name: str
+    :param app_insights_enabled: Whether or not Application Insights is enabled.
+    :type app_insights_enabled: bool
+    :param auto_scaler: The auto scaler properties.
+    :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+    :param container_resource_requirements: The container resource requirements.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+     container.
+    :type max_concurrent_requests_per_container: int
+    :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+     After this time, the service will return 503 (Service Unavailable).
+    :type max_queue_wait_ms: int
+    :param namespace: Kubernetes namespace for the service.
+    :type namespace: str
+    :param scoring_timeout_ms: The scoring timeout in milliseconds.
+    :type scoring_timeout_ms: int
+    :param auth_enabled: Whether or not authentication is enabled.
+    :type auth_enabled: bool
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+    :type aad_auth_enabled: bool
+    """
+
+    # Consumed reflectively by the msrest serializer.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'keys': {'key': 'keys', 'type': 'AuthKeys'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+        'location': {'key': 'location', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+        'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+        'compute_name': {'key': 'computeName', 'type': 'str'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        keys: Optional["AuthKeys"] = None,
+        environment_image_request: Optional["EnvironmentImageRequest"] = None,
+        location: Optional[str] = None,
+        is_default: Optional[bool] = None,
+        traffic_percentile: Optional[float] = None,
+        type: Optional[Union[str, "VariantType"]] = None,
+        num_replicas: Optional[int] = None,
+        data_collection: Optional["ModelDataCollection"] = None,
+        compute_name: Optional[str] = None,
+        app_insights_enabled: Optional[bool] = None,
+        auto_scaler: Optional["AutoScaler"] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        max_concurrent_requests_per_container: Optional[int] = None,
+        max_queue_wait_ms: Optional[int] = None,
+        namespace: Optional[str] = None,
+        scoring_timeout_ms: Optional[int] = None,
+        auth_enabled: Optional[bool] = None,
+        liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        aad_auth_enabled: Optional[bool] = None,
+        **kwargs
+    ):
+        super(AksServiceCreateRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, is_default=is_default, traffic_percentile=traffic_percentile, type=type, **kwargs)
+        # Fixed polymorphic discriminator for this subtype (overrides the base's
+        # placeholder value set in CreateEndpointVariantRequest.__init__).
+        self.compute_type = 'AKS'  # type: str
+        self.num_replicas = num_replicas
+        self.data_collection = data_collection
+        self.compute_name = compute_name
+        self.app_insights_enabled = app_insights_enabled
+        self.auto_scaler = auto_scaler
+        self.container_resource_requirements = container_resource_requirements
+        self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
+        self.max_queue_wait_ms = max_queue_wait_ms
+        self.namespace = namespace
+        self.scoring_timeout_ms = scoring_timeout_ms
+        self.auth_enabled = auth_enabled
+        self.liveness_probe_requirements = liveness_probe_requirements
+        self.aad_auth_enabled = aad_auth_enabled
+
+
+class AutoScaler(msrest.serialization.Model):
+    """The Auto Scaler properties.
+
+    :param autoscale_enabled: Option to enable/disable auto scaling.
+    :type autoscale_enabled: bool
+    :param min_replicas: The minimum number of replicas to scale down to.
+    :type min_replicas: int
+    :param max_replicas: The maximum number of replicas in the cluster.
+    :type max_replicas: int
+    :param target_utilization: The target utilization percentage to use for determining whether to
+     scale the cluster.
+    :type target_utilization: int
+    :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+    :type refresh_period_in_seconds: int
+    """
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    _attribute_map = {
+        'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        autoscale_enabled: Optional[bool] = None,
+        min_replicas: Optional[int] = None,
+        max_replicas: Optional[int] = None,
+        target_utilization: Optional[int] = None,
+        refresh_period_in_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        super(AutoScaler, self).__init__(**kwargs)
+        self.autoscale_enabled = autoscale_enabled
+        self.min_replicas = min_replicas
+        self.max_replicas = max_replicas
+        self.target_utilization = target_utilization
+        self.refresh_period_in_seconds = refresh_period_in_seconds
+
+
+class AksServiceCreateRequestAutoScaler(AutoScaler):
+    """The auto scaler properties.
+
+    .. note::
+       Generated naming alias of :class:`AutoScaler`; it adds no fields and
+       forwards every argument to the parent unchanged.
+
+    :param autoscale_enabled: Option to enable/disable auto scaling.
+    :type autoscale_enabled: bool
+    :param min_replicas: The minimum number of replicas to scale down to.
+    :type min_replicas: int
+    :param max_replicas: The maximum number of replicas in the cluster.
+    :type max_replicas: int
+    :param target_utilization: The target utilization percentage to use for determining whether to
+     scale the cluster.
+    :type target_utilization: int
+    :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+    :type refresh_period_in_seconds: int
+    """
+
+    # Identical to AutoScaler._attribute_map (restated by the code generator).
+    _attribute_map = {
+        'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        autoscale_enabled: Optional[bool] = None,
+        min_replicas: Optional[int] = None,
+        max_replicas: Optional[int] = None,
+        target_utilization: Optional[int] = None,
+        refresh_period_in_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        super(AksServiceCreateRequestAutoScaler, self).__init__(autoscale_enabled=autoscale_enabled, min_replicas=min_replicas, max_replicas=max_replicas, target_utilization=target_utilization, refresh_period_in_seconds=refresh_period_in_seconds, **kwargs)
+
+
+class AksServiceCreateRequestDataCollection(ModelDataCollection):
+    """Details of the data collection options specified.
+
+    .. note::
+       Generated naming alias of ``ModelDataCollection``; adds no fields and
+       forwards every argument to the parent unchanged.
+
+    :param event_hub_enabled: Option for enabling/disabling Event Hub.
+    :type event_hub_enabled: bool
+    :param storage_enabled: Option for enabling/disabling storage.
+    :type storage_enabled: bool
+    """
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+        'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        event_hub_enabled: Optional[bool] = None,
+        storage_enabled: Optional[bool] = None,
+        **kwargs
+    ):
+        super(AksServiceCreateRequestDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class LivenessProbeRequirements(msrest.serialization.Model):
+    """The liveness probe requirements.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout_seconds: The probe timeout in seconds.
+    :type timeout_seconds: int
+    :param period_seconds: The length of time between probes in seconds.
+    :type period_seconds: int
+    :param initial_delay_seconds: The delay before the first probe in seconds.
+    :type initial_delay_seconds: int
+    """
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+        'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+        'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        failure_threshold: Optional[int] = None,
+        success_threshold: Optional[int] = None,
+        timeout_seconds: Optional[int] = None,
+        period_seconds: Optional[int] = None,
+        initial_delay_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        # All fields are optional on the wire; unset values serialize as absent.
+        super(LivenessProbeRequirements, self).__init__(**kwargs)
+        self.failure_threshold = failure_threshold
+        self.success_threshold = success_threshold
+        self.timeout_seconds = timeout_seconds
+        self.period_seconds = period_seconds
+        self.initial_delay_seconds = initial_delay_seconds
+
+
+class AksServiceCreateRequestLivenessProbeRequirements(LivenessProbeRequirements):
+    """The liveness probe requirements.
+
+    .. note::
+       Generated naming alias of :class:`LivenessProbeRequirements`; adds no
+       fields and forwards every argument to the parent unchanged.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout_seconds: The probe timeout in seconds.
+    :type timeout_seconds: int
+    :param period_seconds: The length of time between probes in seconds.
+    :type period_seconds: int
+    :param initial_delay_seconds: The delay before the first probe in seconds.
+    :type initial_delay_seconds: int
+    """
+
+    # Identical to LivenessProbeRequirements._attribute_map (restated by the generator).
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+        'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+        'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        failure_threshold: Optional[int] = None,
+        success_threshold: Optional[int] = None,
+        timeout_seconds: Optional[int] = None,
+        period_seconds: Optional[int] = None,
+        initial_delay_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        super(AksServiceCreateRequestLivenessProbeRequirements, self).__init__(failure_threshold=failure_threshold, success_threshold=success_threshold, timeout_seconds=timeout_seconds, period_seconds=period_seconds, initial_delay_seconds=initial_delay_seconds, **kwargs)
+
+
+class AksVariantResponse(ServiceResponseBase):
+    """The response for an AKS variant.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AksServiceResponse.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    """
+
+    # 'state' and 'error' are server-populated; 'compute_type' is the polymorphic discriminator.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    # Polymorphic dispatch: payloads with computeType == 'AKS' deserialize as AksServiceResponse.
+    _subtype_map = {
+        'compute_type': {'AKS': 'AksServiceResponse'}
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+        is_default: Optional[bool] = None,
+        traffic_percentile: Optional[float] = None,
+        type: Optional[Union[str, "VariantType"]] = None,
+        **kwargs
+    ):
+        super(AksVariantResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, **kwargs)
+        # NOTE(review): the discriminator is set to 'Custom' although the documented
+        # values are "ACI"/"AKS" and _subtype_map only registers 'AKS'. This matches
+        # autorest output for a base variant type, but confirm against the service
+        # swagger before changing — altering it would affect polymorphic serialization.
+        self.compute_type = 'Custom'  # type: str
+        self.is_default = is_default
+        self.traffic_percentile = traffic_percentile
+        self.type = type
+
+
+class AksServiceResponse(AksVariantResponse):
+    """The response for an AKS service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The service description.
+    :type description: str
+    :param kv_tags: The service tag dictionary. Tags are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The service property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :ivar state: The current state of the service. Possible values include: "Transitioning",
+     "Healthy", "Unhealthy", "Failed", "Unschedulable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+    :ivar error: The error details.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    :param compute_type: Required. The compute environment type for the service.Constant filled by
+     server. Possible values include: "ACI", "AKS".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+    :param deployment_type: The deployment type for the service. Possible values include:
+     "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+    :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+    :param is_default: Is this the default variant.
+    :type is_default: bool
+    :param traffic_percentile: The amount of traffic variant receives.
+    :type traffic_percentile: float
+    :param type: The type of the variant. Possible values include: "Control", "Treatment".
+    :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+    :param models: The list of models.
+    :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param container_resource_requirements: The container resource requirements.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+     container.
+    :type max_concurrent_requests_per_container: int
+    :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+     After this time, the service will return 503 (Service Unavailable).
+    :type max_queue_wait_ms: int
+    :param compute_name: The name of the compute resource.
+    :type compute_name: str
+    :param namespace: The Kubernetes namespace of the deployment.
+    :type namespace: str
+    :param num_replicas: The number of replicas on the cluster.
+    :type num_replicas: int
+    :param data_collection: Details of the data collection options specified.
+    :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+    :param app_insights_enabled: Whether or not Application Insights is enabled.
+    :type app_insights_enabled: bool
+    :param auto_scaler: The auto scaler properties.
+    :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+    :ivar scoring_uri: The Uri for sending scoring requests.
+    :vartype scoring_uri: str
+    :ivar deployment_status: The deployment status.
+    :vartype deployment_status: ~azure_machine_learning_workspaces.models.AksReplicaStatus
+    :param scoring_timeout_ms: The scoring timeout in milliseconds.
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param auth_enabled: Whether or not authentication is enabled.
+    :type auth_enabled: bool
+    :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+    :type aad_auth_enabled: bool
+    :ivar swagger_uri: The Uri for sending swagger requests.
+    :vartype swagger_uri: str
+    :ivar model_config_map: Details on the models and configurations.
+    :vartype model_config_map: dict[str, object]
+    :param environment_image_request: The Environment, models and assets used for inferencing.
+    :type environment_image_request:
+     ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+    """
+
+    # Readonly entries are populated by the server and dropped from outgoing requests.
+    _validation = {
+        'state': {'readonly': True},
+        'error': {'readonly': True},
+        'compute_type': {'required': True},
+        'scoring_uri': {'readonly': True},
+        'deployment_status': {'readonly': True},
+        'swagger_uri': {'readonly': True},
+        'model_config_map': {'readonly': True},
+    }
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'state': {'key': 'state', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+        'is_default': {'key': 'isDefault', 'type': 'bool'},
+        'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+        'type': {'key': 'type', 'type': 'str'},
+        'models': {'key': 'models', 'type': '[Model]'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+        'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'compute_name': {'key': 'computeName', 'type': 'str'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+        'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+        'deployment_status': {'key': 'deploymentStatus', 'type': 'AksReplicaStatus'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+        'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+        'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+        'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+        is_default: Optional[bool] = None,
+        traffic_percentile: Optional[float] = None,
+        type: Optional[Union[str, "VariantType"]] = None,
+        models: Optional[List["Model"]] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        max_concurrent_requests_per_container: Optional[int] = None,
+        max_queue_wait_ms: Optional[int] = None,
+        compute_name: Optional[str] = None,
+        namespace: Optional[str] = None,
+        num_replicas: Optional[int] = None,
+        data_collection: Optional["ModelDataCollection"] = None,
+        app_insights_enabled: Optional[bool] = None,
+        auto_scaler: Optional["AutoScaler"] = None,
+        scoring_timeout_ms: Optional[int] = None,
+        liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        auth_enabled: Optional[bool] = None,
+        aad_auth_enabled: Optional[bool] = None,
+        environment_image_request: Optional["EnvironmentImageResponse"] = None,
+        **kwargs
+    ):
+        super(AksServiceResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, is_default=is_default, traffic_percentile=traffic_percentile, type=type, **kwargs)
+        # Override the base class discriminator: this leaf type is always 'AKS'.
+        self.compute_type = 'AKS'  # type: str
+        self.models = models
+        self.container_resource_requirements = container_resource_requirements
+        self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
+        self.max_queue_wait_ms = max_queue_wait_ms
+        self.compute_name = compute_name
+        self.namespace = namespace
+        self.num_replicas = num_replicas
+        self.data_collection = data_collection
+        self.app_insights_enabled = app_insights_enabled
+        self.auto_scaler = auto_scaler
+        # Readonly attributes start as None; the server fills them on responses.
+        self.scoring_uri = None
+        self.deployment_status = None
+        self.scoring_timeout_ms = scoring_timeout_ms
+        self.liveness_probe_requirements = liveness_probe_requirements
+        self.auth_enabled = auth_enabled
+        self.aad_auth_enabled = aad_auth_enabled
+        self.swagger_uri = None
+        self.model_config_map = None
+        self.environment_image_request = environment_image_request
+
+
+class AksServiceResponseAutoScaler(AutoScaler):
+    """The auto scaler properties.
+
+    .. note::
+       Generated naming alias of :class:`AutoScaler`; adds no fields and
+       forwards every argument to the parent unchanged.
+
+    :param autoscale_enabled: Option to enable/disable auto scaling.
+    :type autoscale_enabled: bool
+    :param min_replicas: The minimum number of replicas to scale down to.
+    :type min_replicas: int
+    :param max_replicas: The maximum number of replicas in the cluster.
+    :type max_replicas: int
+    :param target_utilization: The target utilization percentage to use for determining whether to
+     scale the cluster.
+    :type target_utilization: int
+    :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+    :type refresh_period_in_seconds: int
+    """
+
+    # Identical to AutoScaler._attribute_map (restated by the code generator).
+    _attribute_map = {
+        'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        autoscale_enabled: Optional[bool] = None,
+        min_replicas: Optional[int] = None,
+        max_replicas: Optional[int] = None,
+        target_utilization: Optional[int] = None,
+        refresh_period_in_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        super(AksServiceResponseAutoScaler, self).__init__(autoscale_enabled=autoscale_enabled, min_replicas=min_replicas, max_replicas=max_replicas, target_utilization=target_utilization, refresh_period_in_seconds=refresh_period_in_seconds, **kwargs)
+
+
+class AksServiceResponseDataCollection(ModelDataCollection):
+    """Details of the data collection options specified.
+
+    .. note::
+       Generated naming alias of ``ModelDataCollection``; adds no fields and
+       forwards every argument to the parent unchanged.
+
+    :param event_hub_enabled: Option for enabling/disabling Event Hub.
+    :type event_hub_enabled: bool
+    :param storage_enabled: Option for enabling/disabling storage.
+    :type storage_enabled: bool
+    """
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+        'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        event_hub_enabled: Optional[bool] = None,
+        storage_enabled: Optional[bool] = None,
+        **kwargs
+    ):
+        super(AksServiceResponseDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class AksServiceResponseDeploymentStatus(AksReplicaStatus):
+    """The deployment status.
+
+    .. note::
+       Generated naming alias of ``AksReplicaStatus``; adds no fields and
+       forwards every argument to the parent unchanged.
+
+    :param desired_replicas: The desired number of replicas.
+    :type desired_replicas: int
+    :param updated_replicas: The number of updated replicas.
+    :type updated_replicas: int
+    :param available_replicas: The number of available replicas.
+    :type available_replicas: int
+    :param error: The error details.
+    :type error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    """
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+        'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+        'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        desired_replicas: Optional[int] = None,
+        updated_replicas: Optional[int] = None,
+        available_replicas: Optional[int] = None,
+        error: Optional["ErrorResponse"] = None,
+        **kwargs
+    ):
+        super(AksServiceResponseDeploymentStatus, self).__init__(desired_replicas=desired_replicas, updated_replicas=updated_replicas, available_replicas=available_replicas, error=error, **kwargs)
+
+
+class AksServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+    """The Environment, models and assets used for inferencing.
+
+    .. note::
+       Generated naming alias of ``EnvironmentImageResponse``; adds no fields
+       and forwards every argument to the parent unchanged.
+
+    :param driver_program: The name of the driver file.
+    :type driver_program: str
+    :param assets: The list of assets.
+    :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+    :param model_ids: The list of model Ids.
+    :type model_ids: list[str]
+    :param models: The list of models.
+    :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param environment: The details of the AZURE ML environment.
+    :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+    :param environment_reference: The unique identifying details of the AZURE ML environment.
+    :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+    """
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'driver_program': {'key': 'driverProgram', 'type': 'str'},
+        'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+        'model_ids': {'key': 'modelIds', 'type': '[str]'},
+        'models': {'key': 'models', 'type': '[Model]'},
+        'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+        'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+    }
+
+    def __init__(
+        self,
+        *,
+        driver_program: Optional[str] = None,
+        assets: Optional[List["ImageAsset"]] = None,
+        model_ids: Optional[List[str]] = None,
+        models: Optional[List["Model"]] = None,
+        environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+        environment_reference: Optional["EnvironmentReference"] = None,
+        **kwargs
+    ):
+        super(AksServiceResponseEnvironmentImageRequest, self).__init__(driver_program=driver_program, assets=assets, model_ids=model_ids, models=models, environment=environment, environment_reference=environment_reference, **kwargs)
+
+
+class AksServiceResponseLivenessProbeRequirements(LivenessProbeRequirements):
+    """The liveness probe requirements.
+
+    .. note::
+       Generated naming alias of :class:`LivenessProbeRequirements`; adds no
+       fields and forwards every argument to the parent unchanged.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout_seconds: The probe timeout in seconds.
+    :type timeout_seconds: int
+    :param period_seconds: The length of time between probes in seconds.
+    :type period_seconds: int
+    :param initial_delay_seconds: The delay before the first probe in seconds.
+    :type initial_delay_seconds: int
+    """
+
+    # Identical to LivenessProbeRequirements._attribute_map (restated by the generator).
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+        'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+        'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        failure_threshold: Optional[int] = None,
+        success_threshold: Optional[int] = None,
+        timeout_seconds: Optional[int] = None,
+        period_seconds: Optional[int] = None,
+        initial_delay_seconds: Optional[int] = None,
+        **kwargs
+    ):
+        super(AksServiceResponseLivenessProbeRequirements, self).__init__(failure_threshold=failure_threshold, success_threshold=success_threshold, timeout_seconds=timeout_seconds, period_seconds=period_seconds, initial_delay_seconds=initial_delay_seconds, **kwargs)
+
+
+class AmlCompute(Compute):
+    """An Azure Machine Learning compute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param properties: AML Compute properties.
+    :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+    """
+
+    # Readonly entries are populated by the server and dropped from outgoing requests;
+    # 'compute_type' is the polymorphic discriminator declared on the Compute base.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # msrest serialization metadata: python attribute -> wire (JSON) key and type.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_location: Optional[str] = None,
+        description: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        properties: Optional["AmlComputeProperties"] = None,
+        **kwargs
+    ):
+        super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+        # Fix the discriminator for this leaf type.
+        self.compute_type = 'AmlCompute'  # type: str
+        self.properties = properties
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+    """Compute node information related to a AmlCompute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar node_id: ID of the compute node.
+    :vartype node_id: str
+    :ivar private_ip_address: Private IP address of the compute node.
+    :vartype private_ip_address: str
+    :ivar public_ip_address: Public IP address of the compute node.
+    :vartype public_ip_address: str
+    :ivar port: SSH port number of the node.
+    :vartype port: int
+    :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+     leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+     "leaving", "preempted".
+    :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+    :ivar run_id: ID of the Experiment running on the node, if any else null.
+    :vartype run_id: str
+    """
+
+    # Every attribute is server-populated (response-only model).
+    _validation = {
+        'node_id': {'readonly': True},
+        'private_ip_address': {'readonly': True},
+        'public_ip_address': {'readonly': True},
+        'port': {'readonly': True},
+        'node_state': {'readonly': True},
+        'run_id': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'node_id': {'key': 'nodeId', 'type': 'str'},
+        'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'node_state': {'key': 'nodeState', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All attributes start as None; the deserializer fills them from responses.
+        super(AmlComputeNodeInformation, self).__init__(**kwargs)
+        self.node_id = None
+        self.private_ip_address = None
+        self.public_ip_address = None
+        self.port = None
+        self.node_state = None
+        self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+    """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: AmlComputeNodesInformation.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :ivar next_link: The continuation token.
+    :vartype next_link: str
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    # Polymorphic dispatch: payloads with computeType == 'AmlCompute'
+    # deserialize as AmlComputeNodesInformation.
+    _subtype_map = {
+        'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Abstract base of the polymorphic hierarchy: the discriminator stays
+        # unset here and is fixed by the concrete subclasses.
+        super(ComputeNodesInformation, self).__init__(**kwargs)
+        self.compute_type = None  # type: Optional[str]
+        self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+    """Compute node information related to a AmlCompute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :ivar next_link: The continuation token.
+    :vartype next_link: str
+    :ivar nodes: The collection of returned AmlCompute nodes details.
+    :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+    """
+
+    # 'nodes' and 'next_link' are server-populated; 'compute_type' is the discriminator.
+    _validation = {
+        'compute_type': {'required': True},
+        'next_link': {'readonly': True},
+        'nodes': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AmlComputeNodesInformation, self).__init__(**kwargs)
+        # Fix the discriminator for this leaf type; 'nodes' is filled by the deserializer.
+        self.compute_type = 'AmlCompute'  # type: str
+        self.nodes = None
+
+
class AmlComputeProperties(msrest.serialization.Model):
    """AML Compute properties.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
     "Linux".
    :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
    :param vm_size: Virtual Machine Size.
    :type vm_size: str
    :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
     "LowPriority".
    :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
    :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
    :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
    :param isolated_network: Network is isolated or not.
    :type isolated_network: bool
    :param scale_settings: Scale settings for AML Compute.
    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
    :param user_account_credentials: Credentials for an administrator user account that will be
     created on each compute node.
    :type user_account_credentials:
     ~azure_machine_learning_workspaces.models.UserAccountCredentials
    :param subnet: Virtual network subnet resource ID the compute nodes belong to.
    :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
    :param remote_login_port_public_access: State of the public SSH port. Possible values are:
     Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
     Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
     Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
     else is open all public nodes. It can be default only during cluster creation time, after
     creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
     "NotSpecified". Default value: "NotSpecified".
    :type remote_login_port_public_access: str or
     ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
    :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
     Indicates that the compute is not resizing. There are no changes to the number of compute nodes
     in the compute in progress. A compute enters this state when it is created and when no
     operations are being performed on the compute to change the number of compute nodes. resizing -
     Indicates that the compute is resizing; that is, compute nodes are being added to or removed
     from the compute. Possible values include: "Steady", "Resizing".
    :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
    :ivar allocation_state_transition_time: The time at which the compute entered its current
     allocation state.
    :vartype allocation_state_transition_time: ~datetime.datetime
    :ivar errors: Collection of errors encountered by various compute nodes during node setup.
    :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
    :ivar current_node_count: The number of compute nodes currently assigned to the compute.
    :vartype current_node_count: int
    :ivar target_node_count: The target number of compute nodes for the compute. If the
     allocationState is resizing, this property denotes the target node count for the ongoing resize
     operation. If the allocationState is steady, this property denotes the target node count for
     the previous resize operation.
    :vartype target_node_count: int
    :ivar node_state_counts: Counts of various node states on the compute.
    :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
    :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
     values are: true - Indicates that the compute nodes will have public IPs
     provisioned. false - Indicates that the compute nodes will have a private endpoint and no
     public IPs.
    :type enable_node_public_ip: bool
    """

    # Fields marked 'readonly' are populated by the service and ignored on requests.
    _validation = {
        'allocation_state': {'readonly': True},
        'allocation_state_transition_time': {'readonly': True},
        'errors': {'readonly': True},
        'current_node_count': {'readonly': True},
        'target_node_count': {'readonly': True},
        'node_state_counts': {'readonly': True},
    }

    # REST wire name and msrest serialization type for each attribute.
    _attribute_map = {
        'os_type': {'key': 'osType', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'vm_priority': {'key': 'vmPriority', 'type': 'str'},
        'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
        'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
        'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
        'subnet': {'key': 'subnet', 'type': 'ResourceId'},
        'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
        'allocation_state': {'key': 'allocationState', 'type': 'str'},
        'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
        'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
        'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
        'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
        'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
        'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        os_type: Optional[Union[str, "OsType"]] = "Linux",
        vm_size: Optional[str] = None,
        vm_priority: Optional[Union[str, "VmPriority"]] = None,
        virtual_machine_image: Optional["VirtualMachineImage"] = None,
        isolated_network: Optional[bool] = None,
        scale_settings: Optional["ScaleSettings"] = None,
        user_account_credentials: Optional["UserAccountCredentials"] = None,
        subnet: Optional["ResourceId"] = None,
        remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified",
        enable_node_public_ip: Optional[bool] = True,
        **kwargs
    ):
        super(AmlComputeProperties, self).__init__(**kwargs)
        self.os_type = os_type
        self.vm_size = vm_size
        self.vm_priority = vm_priority
        self.virtual_machine_image = virtual_machine_image
        self.isolated_network = isolated_network
        self.scale_settings = scale_settings
        self.user_account_credentials = user_account_credentials
        self.subnet = subnet
        self.remote_login_port_public_access = remote_login_port_public_access
        # Read-only, server-populated fields start as None.
        self.allocation_state = None
        self.allocation_state_transition_time = None
        self.errors = None
        self.current_node_count = None
        self.target_node_count = None
        self.node_state_counts = None
        self.enable_node_public_ip = enable_node_public_ip
+
+
class IdentityConfiguration(msrest.serialization.Model):
    """Polymorphic base for job identity configurations.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AmlTokenConfiguration, ManagedIdentityConfiguration, ServicePrincipalConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
     server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
    """

    _validation = {'identity_type': {'required': True}}

    _attribute_map = {'identity_type': {'key': 'identityType', 'type': 'str'}}

    # Discriminator wire value -> concrete model class name.
    _subtype_map = {
        'identity_type': {
            'AMLToken': 'AmlTokenConfiguration',
            'Managed': 'ManagedIdentityConfiguration',
            'ServicePrincipal': 'ServicePrincipalConfiguration',
        }
    }

    def __init__(self, **kwargs):
        super(IdentityConfiguration, self).__init__(**kwargs)
        # Discriminator; overwritten with a constant by each subclass.
        self.identity_type = None  # type: Optional[str]
+
+
class AmlTokenConfiguration(IdentityConfiguration):
    """Identity configuration that authenticates with an AML token.

    All required parameters must be populated in order to send to Azure.

    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
     server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
    """

    _validation = {'identity_type': {'required': True}}

    _attribute_map = {'identity_type': {'key': 'identityType', 'type': 'str'}}

    def __init__(self, **kwargs):
        super(AmlTokenConfiguration, self).__init__(**kwargs)
        # Fixed discriminator value for this subclass.
        self.identity_type = 'AMLToken'  # type: str
+
+
class AmlUserFeature(msrest.serialization.Model):
    """A feature flag enabled for a workspace.

    :param id: Specifies the feature ID.
    :type id: str
    :param display_name: Specifies the feature name.
    :type display_name: str
    :param description: Describes the feature for user experience.
    :type description: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, *, id: Optional[str] = None, display_name: Optional[str] = None,
                 description: Optional[str] = None, **kwargs):
        super(AmlUserFeature, self).__init__(**kwargs)
        self.description = description
        self.display_name = display_name
        self.id = id
+
+
class AssetPath(msrest.serialization.Model):
    """Details of an AssetUri.

    All required parameters must be populated in order to send to Azure.

    :param path: Required. The path of file/directory.
    :type path: str
    :param is_directory: Whether the path defines a directory or a single file.
    :type is_directory: bool
    """

    _validation = {'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}}

    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'is_directory': {'key': 'isDirectory', 'type': 'bool'},
    }

    def __init__(self, *, path: str, is_directory: Optional[bool] = None, **kwargs):
        super(AssetPath, self).__init__(**kwargs)
        self.is_directory = is_directory
        self.path = path
+
+
class AssetReferenceBase(msrest.serialization.Model):
    """Polymorphic base for references to an asset.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
     server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    """

    _validation = {'reference_type': {'required': True}}

    _attribute_map = {'reference_type': {'key': 'referenceType', 'type': 'str'}}

    # Discriminator wire value -> concrete model class name.
    _subtype_map = {
        'reference_type': {
            'DataPath': 'DataPathAssetReference',
            'Id': 'IdAssetReference',
            'OutputPath': 'OutputPathAssetReference',
        }
    }

    def __init__(self, **kwargs):
        super(AssetReferenceBase, self).__init__(**kwargs)
        # Discriminator; overwritten with a constant by each subclass.
        self.reference_type = None  # type: Optional[str]
+
+
class AssignedUser(msrest.serialization.Model):
    """A user that can be assigned to a compute instance.

    All required parameters must be populated in order to send to Azure.

    :param object_id: Required. User’s AAD Object Id.
    :type object_id: str
    :param tenant_id: Required. User’s AAD Tenant Id.
    :type tenant_id: str
    """

    _validation = {
        'object_id': {'required': True},
        'tenant_id': {'required': True},
    }

    _attribute_map = {
        'object_id': {'key': 'objectId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(self, *, object_id: str, tenant_id: str, **kwargs):
        super(AssignedUser, self).__init__(**kwargs)
        self.tenant_id = tenant_id
        self.object_id = object_id
+
+
class AuthKeys(msrest.serialization.Model):
    """A primary/secondary authentication key pair.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(self, *, primary_key: Optional[str] = None,
                 secondary_key: Optional[str] = None, **kwargs):
        super(AuthKeys, self).__init__(**kwargs)
        self.secondary_key = secondary_key
        self.primary_key = primary_key
+
+
class JobBase(msrest.serialization.Model):
    """Job base definition.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: ComputeJobBase, LabelingJob.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    """

    # 'readonly' fields are server-populated and ignored on requests.
    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    # Discriminator wire value -> subclass name, used by msrest deserialization.
    # NOTE(review): 'ComputeJobBase' is not one of the documented JobType values
    # ("Command", "Sweep", ...); presumably this is how the generator encodes the
    # intermediate base class -- confirm against the service's discriminator values.
    _subtype_map = {
        'job_type': {'ComputeJobBase': 'ComputeJobBase', 'Labeling': 'LabelingJob'}
    }

    def __init__(
        self,
        *,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super(JobBase, self).__init__(**kwargs)
        # Discriminator; concrete subclasses overwrite with their constant value.
        self.job_type = None  # type: Optional[str]
        # Read-only, server-populated fields start as None.
        self.provisioning_state = None
        self.interaction_endpoints = None
        self.description = description
        self.tags = tags
        self.properties = properties
+
+
class ComputeJobBase(JobBase):
    """Compute job base definition.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AutoMlJob, CommandJob, PipelineJob, SweepJob.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
     placed in the "Default" experiment.
    :type experiment_name: str
    :param compute_binding: Required. Compute binding for the job.
    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
     Private preview is only for whitelisted customers.
    :type priority: int
    """

    # 'readonly' fields are server-populated and ignored on requests.
    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
        'compute_binding': {'required': True},
        'output': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
    }

    # Discriminator wire value -> subclass name for the next level of the hierarchy.
    _subtype_map = {
        'job_type': {'AutoML': 'AutoMlJob', 'Command': 'CommandJob', 'Pipeline': 'PipelineJob', 'Sweep': 'SweepJob'}
    }

    def __init__(
        self,
        *,
        compute_binding: "ComputeBinding",
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        experiment_name: Optional[str] = None,
        priority: Optional[int] = None,
        **kwargs
    ):
        super(ComputeJobBase, self).__init__(description=description, tags=tags, properties=properties, **kwargs)
        # NOTE(review): 'ComputeJobBase' is not a documented JobType enum value;
        # it matches the parent's _subtype_map key for this intermediate class.
        self.job_type = 'ComputeJobBase'  # type: str
        self.experiment_name = experiment_name
        self.compute_binding = compute_binding
        # Read-only; populated by the service on responses.
        self.output = None
        self.priority = priority
+
+
class AutoMlJob(ComputeJobBase):
    """AutoML Job definition.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
     For local jobs, a job endpoint will have a value of FileStreamObject.
    :vartype interaction_endpoints:
     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
     placed in the "Default" experiment.
    :type experiment_name: str
    :param compute_binding: Required. Compute binding for the job.
    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
     Private preview is only for whitelisted customers.
    :type priority: int
    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled", "NotResponding", "Paused".
    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
    :param general_settings: General Settings.
    :type general_settings: ~azure_machine_learning_workspaces.models.GeneralSettings
    :param limit_settings: Limit Settings.
    :type limit_settings: ~azure_machine_learning_workspaces.models.ExperimentLimits
    :param data_settings: Collection of registered Tabular Dataset Ids required for training.
    :type data_settings: ~azure_machine_learning_workspaces.models.DataSettings
    :param featurization_settings: Featurization related configuration.
    :type featurization_settings: ~azure_machine_learning_workspaces.models.FeaturizationSettings
    :param forecasting_settings: Forecasting experiment specific configuration.
    :type forecasting_settings: ~azure_machine_learning_workspaces.models.ForecastingSettings
    :param training_settings: Advanced configuration settings for an AutoML Job.
    :type training_settings: ~azure_machine_learning_workspaces.models.TrainingSettings
    """

    # 'readonly' fields are server-populated and ignored on requests.
    _validation = {
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
        'compute_binding': {'required': True},
        'output': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
        'status': {'key': 'status', 'type': 'str'},
        'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
        'limit_settings': {'key': 'limitSettings', 'type': 'ExperimentLimits'},
        'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
        'featurization_settings': {'key': 'featurizationSettings', 'type': 'FeaturizationSettings'},
        'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
        'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
    }

    def __init__(
        self,
        *,
        compute_binding: "ComputeBinding",
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        experiment_name: Optional[str] = None,
        priority: Optional[int] = None,
        general_settings: Optional["GeneralSettings"] = None,
        limit_settings: Optional["ExperimentLimits"] = None,
        data_settings: Optional["DataSettings"] = None,
        featurization_settings: Optional["FeaturizationSettings"] = None,
        forecasting_settings: Optional["ForecastingSettings"] = None,
        training_settings: Optional["TrainingSettings"] = None,
        **kwargs
    ):
        super(AutoMlJob, self).__init__(description=description, tags=tags, properties=properties, experiment_name=experiment_name, compute_binding=compute_binding, priority=priority, **kwargs)
        # Fixed discriminator value for this subclass.
        self.job_type = 'AutoML'  # type: str
        # Read-only; populated by the service on responses.
        self.status = None
        self.general_settings = general_settings
        self.limit_settings = limit_settings
        self.data_settings = data_settings
        self.featurization_settings = featurization_settings
        self.forecasting_settings = forecasting_settings
        self.training_settings = training_settings
+
+
class AzureDataLakeSection(msrest.serialization.Model):
    """Connection details for an Azure Data Lake datastore.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure Data Lake credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param store_name: Required. Azure Data Lake store name.
    :type store_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'store_name': {'key': 'storeName', 'type': 'str'},
    }

    def __init__(self, *, credentials: "DatastoreCredentials", store_name: str, **kwargs):
        super(AzureDataLakeSection, self).__init__(**kwargs)
        self.store_name = store_name
        self.credentials = credentials
+
+
class AzureMlComputeConfiguration(ComputeConfiguration):
    """Endpoint compute configuration backed by AzureML compute.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. Constant filled by server. Possible values include: "Managed",
     "AKS", "AzureMLCompute".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
    """

    _validation = {'compute_type': {'required': True}}

    _attribute_map = {'compute_type': {'key': 'computeType', 'type': 'str'}}

    def __init__(self, **kwargs):
        super(AzureMlComputeConfiguration, self).__init__(**kwargs)
        # Fixed discriminator value for this subclass.
        self.compute_type = 'AzureMLCompute'  # type: str
+
+
class AzureMySqlSection(msrest.serialization.Model):
    """Connection details for an Azure MySQL datastore.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure SQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(self, *, credentials: "DatastoreCredentials", database_name: str,
                 endpoint: str, port_number: int, server_name: str, **kwargs):
        super(AzureMySqlSection, self).__init__(**kwargs)
        self.server_name = server_name
        self.port_number = port_number
        self.endpoint = endpoint
        self.database_name = database_name
        self.credentials = credentials
+
+
class AzurePostgreSqlSection(msrest.serialization.Model):
    """Connection details for an Azure PostgreSQL datastore.

    All required parameters must be populated in order to send to Azure.

    :param enable_ssl: Whether the Azure PostgreSQL server requires SSL.
    :type enable_ssl: bool
    :param credentials: Required. Azure SQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'enable_ssl': {'key': 'enableSSL', 'type': 'bool'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(self, *, credentials: "DatastoreCredentials", database_name: str,
                 endpoint: str, port_number: int, server_name: str,
                 enable_ssl: Optional[bool] = None, **kwargs):
        super(AzurePostgreSqlSection, self).__init__(**kwargs)
        self.server_name = server_name
        self.port_number = port_number
        self.endpoint = endpoint
        self.database_name = database_name
        self.credentials = credentials
        self.enable_ssl = enable_ssl
+
+
class AzureSqlDatabaseSection(msrest.serialization.Model):
    """Connection details for an Azure SQL Database datastore.

    All required parameters must be populated in order to send to Azure.

    :param credentials: Required. Azure SQL database credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(self, *, credentials: "DatastoreCredentials", database_name: str,
                 endpoint: str, port_number: int, server_name: str, **kwargs):
        super(AzureSqlDatabaseSection, self).__init__(**kwargs)
        self.server_name = server_name
        self.port_number = port_number
        self.endpoint = endpoint
        self.database_name = database_name
        self.credentials = credentials
+
+
class AzureStorageSection(msrest.serialization.Model):
    """Connection details for an Azure Storage (blob/file) datastore.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Storage account name.
    :type account_name: str
    :param blob_cache_timeout: Blob storage cache timeout.
    :type blob_cache_timeout: int
    :param container_name: Required. Storage account container name.
    :type container_name: str
    :param credentials: Required. Storage account credentials.
    :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
    :param endpoint: Required. Azure cloud endpoint for the storage account.
    :type endpoint: str
    :param protocol: Required. Protocol used to communicate with the storage account.
    :type protocol: str
    """

    _validation = {
        'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'credentials': {'required': True},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'blob_cache_timeout': {'key': 'blobCacheTimeout', 'type': 'int'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
    }

    def __init__(self, *, account_name: str, container_name: str,
                 credentials: "DatastoreCredentials", endpoint: str, protocol: str,
                 blob_cache_timeout: Optional[int] = None, **kwargs):
        super(AzureStorageSection, self).__init__(**kwargs)
        self.protocol = protocol
        self.endpoint = endpoint
        self.credentials = credentials
        self.container_name = container_name
        self.blob_cache_timeout = blob_cache_timeout
        self.account_name = account_name
+
+
+class EarlyTerminationPolicyConfiguration(msrest.serialization.Model):
+    """Early termination policies enable canceling poor-performing runs before they complete.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: BanditPolicyConfiguration, MedianStoppingPolicyConfiguration, TruncationSelectionPolicyConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+     values include: "Bandit", "MedianStopping", "TruncationSelection".
+    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+    :param evaluation_interval:
+    :type evaluation_interval: int
+    :param delay_evaluation:
+    :type delay_evaluation: int
+    """
+
+    # Client-side constraints checked by msrest before the request is sent.
+    _validation = {
+        'policy_type': {'required': True},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'policy_type': {'key': 'policyType', 'type': 'str'},
+        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+    }
+
+    # Polymorphic dispatch: discriminator wire value -> concrete model class.
+    _subtype_map = {
+        'policy_type': {'Bandit': 'BanditPolicyConfiguration', 'MedianStopping': 'MedianStoppingPolicyConfiguration', 'TruncationSelection': 'TruncationSelectionPolicyConfiguration'}
+    }
+
+    def __init__(
+        self,
+        *,
+        evaluation_interval: Optional[int] = None,
+        delay_evaluation: Optional[int] = None,
+        **kwargs
+    ):
+        super(EarlyTerminationPolicyConfiguration, self).__init__(**kwargs)
+        # Discriminator is left unset on the base class; subclasses fix it.
+        self.policy_type = None  # type: Optional[str]
+        self.evaluation_interval = evaluation_interval
+        self.delay_evaluation = delay_evaluation
+
+
+class BanditPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+    """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+     values include: "Bandit", "MedianStopping", "TruncationSelection".
+    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+    :param evaluation_interval:
+    :type evaluation_interval: int
+    :param delay_evaluation:
+    :type delay_evaluation: int
+    :param slack_factor:
+    :type slack_factor: float
+    :param slack_amount:
+    :type slack_amount: float
+    """
+
+    # Client-side constraints checked by msrest before the request is sent.
+    _validation = {
+        'policy_type': {'required': True},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'policy_type': {'key': 'policyType', 'type': 'str'},
+        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+        'slack_factor': {'key': 'slackFactor', 'type': 'float'},
+        'slack_amount': {'key': 'slackAmount', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        evaluation_interval: Optional[int] = None,
+        delay_evaluation: Optional[int] = None,
+        slack_factor: Optional[float] = None,
+        slack_amount: Optional[float] = None,
+        **kwargs
+    ):
+        super(BanditPolicyConfiguration, self).__init__(evaluation_interval=evaluation_interval, delay_evaluation=delay_evaluation, **kwargs)
+        # Fixed discriminator value for this subtype.
+        self.policy_type = 'Bandit'  # type: str
+        self.slack_factor = slack_factor
+        self.slack_amount = slack_amount
+
+
+class CertificateSection(msrest.serialization.Model):
+    """CertificateSection.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authority_url: Authority URL used for authentication.
+    :type authority_url: str
+    :param resource_uri: Resource the service principal has access to.
+    :type resource_uri: str
+    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+    :type tenant_id: str
+    :param client_id: Required. Service principal client ID.
+    :type client_id: str
+    :param certificate: Service principal certificate.
+    :type certificate: str
+    :param thumbprint: Required. Thumbprint of the certificate used for authentication.
+    :type thumbprint: str
+    """
+
+    # Client-side constraints checked by msrest before the request is sent.
+    _validation = {
+        'tenant_id': {'required': True},
+        'client_id': {'required': True},
+        'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'certificate': {'key': 'certificate', 'type': 'str'},
+        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tenant_id: str,
+        client_id: str,
+        thumbprint: str,
+        authority_url: Optional[str] = None,
+        resource_uri: Optional[str] = None,
+        certificate: Optional[str] = None,
+        **kwargs
+    ):
+        super(CertificateSection, self).__init__(**kwargs)
+        self.authority_url = authority_url
+        self.resource_uri = resource_uri
+        self.tenant_id = tenant_id
+        self.client_id = client_id
+        self.certificate = certificate
+        self.thumbprint = thumbprint
+
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+    """AmlCompute update parameters.
+
+    :param scale_settings: Desired scale settings for the amlCompute.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+    """
+
+    # The 'properties.' key prefix flattens this attribute into the nested
+    # 'properties' object on the wire.
+    _attribute_map = {
+        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+    }
+
+    def __init__(
+        self,
+        *,
+        scale_settings: Optional["ScaleSettings"] = None,
+        **kwargs
+    ):
+        super(ClusterUpdateParameters, self).__init__(**kwargs)
+        self.scale_settings = scale_settings
+
+
+class ExportSummary(msrest.serialization.Model):
+    """ExportSummary.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param format: Required. The format of exported labels, also as the discriminator.Constant
+     filled by server. Possible values include: "Dataset", "Coco", "CSV".
+    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+    :vartype labeling_job_id: str
+    :ivar exported_row_count: The total number of labeled datapoints exported.
+    :vartype exported_row_count: long
+    :ivar start_time_utc: The time when the export was requested.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The time when the export was completed.
+    :vartype end_time_utc: ~datetime.datetime
+    """
+
+    # 'readonly' fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'format': {'required': True},
+        'labeling_job_id': {'readonly': True},
+        'exported_row_count': {'readonly': True},
+        'start_time_utc': {'readonly': True},
+        'end_time_utc': {'readonly': True},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'format': {'key': 'format', 'type': 'str'},
+        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+    }
+
+    # Polymorphic dispatch: discriminator wire value -> concrete model class.
+    _subtype_map = {
+        'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ExportSummary, self).__init__(**kwargs)
+        # Discriminator is left unset on the base class; subclasses fix it.
+        self.format = None  # type: Optional[str]
+        # Read-only attributes: populated only when deserializing a response.
+        self.labeling_job_id = None
+        self.exported_row_count = None
+        self.start_time_utc = None
+        self.end_time_utc = None
+
+
+class CocoExportSummary(ExportSummary):
+    """CocoExportSummary.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param format: Required. The format of exported labels, also as the discriminator.Constant
+     filled by server. Possible values include: "Dataset", "Coco", "CSV".
+    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+    :vartype labeling_job_id: str
+    :ivar exported_row_count: The total number of labeled datapoints exported.
+    :vartype exported_row_count: long
+    :ivar start_time_utc: The time when the export was requested.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The time when the export was completed.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar snapshot_path: The output path where the labels will be exported.
+    :vartype snapshot_path: str
+    :ivar container_name: The container name to which the labels will be exported.
+    :vartype container_name: str
+    """
+
+    # 'readonly' fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'format': {'required': True},
+        'labeling_job_id': {'readonly': True},
+        'exported_row_count': {'readonly': True},
+        'start_time_utc': {'readonly': True},
+        'end_time_utc': {'readonly': True},
+        'snapshot_path': {'readonly': True},
+        'container_name': {'readonly': True},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'format': {'key': 'format', 'type': 'str'},
+        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
+        'container_name': {'key': 'containerName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CocoExportSummary, self).__init__(**kwargs)
+        # Fixed discriminator value for this subtype.
+        self.format = 'Coco'  # type: str
+        # Read-only attributes: populated only when deserializing a response.
+        self.snapshot_path = None
+        self.container_name = None
+
+
+class CodeConfiguration(msrest.serialization.Model):
+    """CodeConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param code_artifact_id: The ID of the code asset.
+    :type code_artifact_id: str
+    :param command: Required. The command to execute on startup of the job. eg. ["python",
+     "train.py"].
+    :type command: str
+    """
+
+    # Client-side constraints checked by msrest before the request is sent;
+    # command must be a non-empty string.
+    _validation = {
+        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'code_artifact_id': {'key': 'codeArtifactId', 'type': 'str'},
+        'command': {'key': 'command', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        command: str,
+        code_artifact_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(CodeConfiguration, self).__init__(**kwargs)
+        self.code_artifact_id = code_artifact_id
+        self.command = command
+
+
+class CodeContainerResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param properties: Dictionary of :code:`<string>`.
+    :type properties: dict[str, str]
+    :param tags: A set of tags. Dictionary of :code:`<string>`.
+    :type tags: dict[str, str]
+    :param description:
+    :type description: str
+    """
+
+    # ARM envelope fields are server-populated and stripped from requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # 'properties.' key prefixes flatten these attributes into the nested
+    # 'properties' object on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        description: Optional[str] = None,
+        **kwargs
+    ):
+        super(CodeContainerResource, self).__init__(**kwargs)
+        # Read-only attributes: populated only when deserializing a response.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.properties = properties
+        self.tags = tags
+        self.description = description
+
+
+class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of CodeContainer entities.
+
+    :param value: An array of objects of type CodeContainer.
+    :type value: list[~azure_machine_learning_workspaces.models.CodeContainerResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Standard ARM page shape: current page items plus continuation link.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[CodeContainerResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["CodeContainerResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class CodeVersionResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param datastore_id: The asset datastoreId.
+    :type datastore_id: str
+    :param asset_path: DEPRECATED - use
+     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
+    :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
+    :param path: The path of the file/directory.
+    :type path: str
+    :param generated_by: If the name version are system generated (anonymous registration) or user
+     generated. Possible values include: "User", "System".
+    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # ARM envelope fields are server-populated and stripped from requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # 'properties.' key prefixes flatten these attributes into the nested
+    # 'properties' object on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
+        'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
+        'path': {'key': 'properties.path', 'type': 'str'},
+        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        datastore_id: Optional[str] = None,
+        asset_path: Optional["AssetPath"] = None,
+        path: Optional[str] = None,
+        generated_by: Optional[Union[str, "AssetGenerator"]] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(CodeVersionResource, self).__init__(**kwargs)
+        # Read-only attributes: populated only when deserializing a response.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.datastore_id = datastore_id
+        self.asset_path = asset_path
+        self.path = path
+        self.generated_by = generated_by
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+
+
+class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of CodeVersion entities.
+
+    :param value: An array of objects of type CodeVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.CodeVersionResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Standard ARM page shape: current page items plus continuation link.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[CodeVersionResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["CodeVersionResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class CommandJob(ComputeJobBase):
+    """Code Job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+     placed in the "Default" experiment.
+    :type experiment_name: str
+    :param compute_binding: Required. Compute binding for the job.
+    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+    :ivar output: Location of the job output logs and artifacts.
+    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+     Private preview is only for whitelisted customers.
+    :type priority: int
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :param max_run_duration_seconds: The max run duration in seconds, after which the job will be
+     cancelled.
+    :type max_run_duration_seconds: long
+    :param code_configuration: Required. Code configuration of the job.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param environment_id: Environment specification of the job.
+    :type environment_id: str
+    :param data_bindings: Mapping of data bindings used in the job.
+    :type data_bindings: dict[str, ~azure_machine_learning_workspaces.models.DataBinding]
+    :param distribution_configuration:
+    :type distribution_configuration:
+     ~azure_machine_learning_workspaces.models.DistributionConfiguration
+    :param environment_variables: Environment variables included in the job.
+    :type environment_variables: dict[str, str]
+    :param identity_configuration:
+    :type identity_configuration: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+    :ivar parameters: Input parameters.
+    :vartype parameters: dict[str, object]
+    """
+
+    # 'readonly' fields are server-populated and stripped from outgoing requests.
+    _validation = {
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'compute_binding': {'required': True},
+        'output': {'readonly': True},
+        'status': {'readonly': True},
+        'code_configuration': {'required': True},
+        'parameters': {'readonly': True},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+        'output': {'key': 'output', 'type': 'JobOutput'},
+        'priority': {'key': 'priority', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
+        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'data_bindings': {'key': 'dataBindings', 'type': '{DataBinding}'},
+        'distribution_configuration': {'key': 'distributionConfiguration', 'type': 'DistributionConfiguration'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'identity_configuration': {'key': 'identityConfiguration', 'type': 'IdentityConfiguration'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_binding: "ComputeBinding",
+        code_configuration: "CodeConfiguration",
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        experiment_name: Optional[str] = None,
+        priority: Optional[int] = None,
+        max_run_duration_seconds: Optional[int] = None,
+        environment_id: Optional[str] = None,
+        data_bindings: Optional[Dict[str, "DataBinding"]] = None,
+        distribution_configuration: Optional["DistributionConfiguration"] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        identity_configuration: Optional["IdentityConfiguration"] = None,
+        **kwargs
+    ):
+        # Shared job-base fields are forwarded to ComputeJobBase (defined
+        # elsewhere in this module); command-specific fields are set below.
+        super(CommandJob, self).__init__(description=description, tags=tags, properties=properties, experiment_name=experiment_name, compute_binding=compute_binding, priority=priority, **kwargs)
+        # Fixed discriminator value for this subtype.
+        self.job_type = 'Command'  # type: str
+        # Read-only attributes: populated only when deserializing a response.
+        self.status = None
+        self.max_run_duration_seconds = max_run_duration_seconds
+        self.code_configuration = code_configuration
+        self.environment_id = environment_id
+        self.data_bindings = data_bindings
+        self.distribution_configuration = distribution_configuration
+        self.environment_variables = environment_variables
+        self.identity_configuration = identity_configuration
+        self.parameters = None
+
+
+class Component(msrest.serialization.Model):
+    """Component.
+
+    :param component_type: Component Type, should match the schema. Possible values include:
+     "CommandComponent".
+    :type component_type: str or ~azure_machine_learning_workspaces.models.ComponentType
+    :param display_name: DisplayName of the component on the UI. Defaults to same as name.
+    :type display_name: str
+    :param is_deterministic: Whether or not its deterministic. Defaults to true.
+    :type is_deterministic: bool
+    :param inputs: Defines input ports of the component. The string key is the name of input, which
+     should be a valid Python variable name.
+    :type inputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentInput]
+    :param outputs: Defines output ports of the component. The string key is the name of Output,
+     which should be a valid Python variable name.
+    :type outputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentOutput]
+    """
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'component_type': {'key': 'componentType', 'type': 'str'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
+        'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
+        'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        component_type: Optional[Union[str, "ComponentType"]] = None,
+        display_name: Optional[str] = None,
+        is_deterministic: Optional[bool] = None,
+        inputs: Optional[Dict[str, "ComponentInput"]] = None,
+        outputs: Optional[Dict[str, "ComponentOutput"]] = None,
+        **kwargs
+    ):
+        super(Component, self).__init__(**kwargs)
+        self.component_type = component_type
+        self.display_name = display_name
+        self.is_deterministic = is_deterministic
+        self.inputs = inputs
+        self.outputs = outputs
+
+
+class ComponentContainerResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # ARM envelope fields are server-populated and stripped from requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # 'properties.' key prefixes flatten these attributes into the nested
+    # 'properties' object on the wire.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ComponentContainerResource, self).__init__(**kwargs)
+        # Read-only attributes: populated only when deserializing a response.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+
+
+class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of ComponentContainer entities.
+
+    :param value: An array of objects of type ComponentContainer.
+    :type value: list[~azure_machine_learning_workspaces.models.ComponentContainerResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Standard ARM page shape: current page items plus continuation link.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ComponentContainerResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["ComponentContainerResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class ComponentInput(msrest.serialization.Model):
+    """ComponentInput.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ComponentInputEnum, ComponentInputGeneric, ComponentInputRangedNumber.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+     Possible values include: "Generic", "RangedNumber", "Enum".
+    :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+    :param optional: If the input is optional. Defaults to false/required.
+    :type optional: bool
+    :param description: Description for input.
+    :type description: str
+    :param default: Default value for an input. Must match the given type.
+    :type default: str
+    :param data_type: Required. Component input type. String is used for type extensibility.
+    :type data_type: str
+    """
+
+    # Client-side constraints checked by msrest before the request is sent.
+    _validation = {
+        'component_input_type': {'required': True},
+        'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+        'optional': {'key': 'optional', 'type': 'bool'},
+        'description': {'key': 'description', 'type': 'str'},
+        'default': {'key': 'default', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+    }
+
+    # Polymorphic dispatch: discriminator wire value -> concrete model class.
+    _subtype_map = {
+        'component_input_type': {'Enum': 'ComponentInputEnum', 'Generic': 'ComponentInputGeneric', 'RangedNumber': 'ComponentInputRangedNumber'}
+    }
+
+    def __init__(
+        self,
+        *,
+        data_type: str,
+        optional: Optional[bool] = None,
+        description: Optional[str] = None,
+        default: Optional[str] = None,
+        **kwargs
+    ):
+        super(ComponentInput, self).__init__(**kwargs)
+        # Discriminator is left unset on the base class; subclasses fix it.
+        self.component_input_type = None  # type: Optional[str]
+        self.optional = optional
+        self.description = description
+        self.default = default
+        self.data_type = data_type
+
+
+class ComponentInputEnum(ComponentInput):
+    """ComponentInputEnum.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+     Possible values include: "Generic", "RangedNumber", "Enum".
+    :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+    :param optional: If the input is optional. Defaults to false/required.
+    :type optional: bool
+    :param description: Description for input.
+    :type description: str
+    :param default: Default value for an input. Must match the given type.
+    :type default: str
+    :param data_type: Required. Component input type. String is used for type extensibility.
+    :type data_type: str
+    :param enum: The enum definition list for enum types, used to validate the inputs for type
+     enum.
+    :type enum: list[str]
+    """
+
+    # Client-side constraints checked by msrest before the request is sent.
+    _validation = {
+        'component_input_type': {'required': True},
+        'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute name -> wire-format JSON key and msrest type.
+    _attribute_map = {
+        'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+        'optional': {'key': 'optional', 'type': 'bool'},
+        'description': {'key': 'description', 'type': 'str'},
+        'default': {'key': 'default', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'enum': {'key': 'enum', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_type: str,
+        optional: Optional[bool] = None,
+        description: Optional[str] = None,
+        default: Optional[str] = None,
+        enum: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(ComponentInputEnum, self).__init__(optional=optional, description=description, default=default, data_type=data_type, **kwargs)
+        # Fixed discriminator value for this subtype.
+        self.component_input_type = 'Enum'  # type: str
+        self.enum = enum
+
+
+class ComponentInputGeneric(ComponentInput):
+ """ComponentInputGeneric.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+ Possible values include: "Generic", "RangedNumber", "Enum".
+ :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+ :param optional: If the input is optional. Defaults to false/required.
+ :type optional: bool
+ :param description: Description for input.
+ :type description: str
+ :param default: Default value for an input. Must match the given type.
+ :type default: str
+ :param data_type: Required. Component input type. String is used for type extensibility.
+ :type data_type: str
+ """
+
+ _validation = {
+ 'component_input_type': {'required': True},
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+ 'optional': {'key': 'optional', 'type': 'bool'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'default': {'key': 'default', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_type: str,
+ optional: Optional[bool] = None,
+ description: Optional[str] = None,
+ default: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComponentInputGeneric, self).__init__(optional=optional, description=description, default=default, data_type=data_type, **kwargs)
+ self.component_input_type = 'Generic' # type: str
+
+
+class ComponentInputRangedNumber(ComponentInput):
+    """ComponentInputRangedNumber.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param component_input_type: Required. Type of ComponentInput.Constant filled by server.
+     Possible values include: "Generic", "RangedNumber", "Enum".
+    :type component_input_type: str or ~azure_machine_learning_workspaces.models.ComponentInputType
+    :param optional: If the input is optional. Defaults to false/required.
+    :type optional: bool
+    :param description: Description for input.
+    :type description: str
+    :param default: Default value for an input. Must match the given type.
+    :type default: str
+    :param data_type: Required. Component input type. String is used for type extensibility.
+    :type data_type: str
+    :param min: The minimum value that can be accepted, used to validate the inputs for type
+     float/int.
+    :type min: str
+    :param max: The maximum value that can be accepted, used to validate the inputs for type
+     float/int.
+    :type max: str
+    """
+
+    # Constraints enforced by msrest client-side validation before sending.
+    _validation = {
+        'component_input_type': {'required': True},
+        'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Maps Python attribute names to JSON wire names and msrest type strings.
+    _attribute_map = {
+        'component_input_type': {'key': 'componentInputType', 'type': 'str'},
+        'optional': {'key': 'optional', 'type': 'bool'},
+        'description': {'key': 'description', 'type': 'str'},
+        'default': {'key': 'default', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'min': {'key': 'min', 'type': 'str'},
+        'max': {'key': 'max', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_type: str,
+        optional: Optional[bool] = None,
+        description: Optional[str] = None,
+        default: Optional[str] = None,
+        min: Optional[str] = None,
+        max: Optional[str] = None,
+        **kwargs
+    ):
+        # NOTE: parameter names 'min'/'max' shadow builtins but are part of the
+        # generated wire contract and must be kept.
+        super(ComponentInputRangedNumber, self).__init__(optional=optional, description=description, default=default, data_type=data_type, **kwargs)
+        # Pin the polymorphic discriminator for this concrete subtype.
+        self.component_input_type = 'RangedNumber'  # type: str
+        self.min = min
+        self.max = max
+
+
+class ComponentJob(msrest.serialization.Model):
+ """ComponentJob.
+
+ :param compute_binding: Compute definition for job.
+ :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param component_id: Reference to component artifact.
+ :type component_id: str
+ :param inputs: Data input set for job.
+ :type inputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJobInput]
+ :param outputs: Data output set for job.
+ :type outputs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJobOutput]
+ """
+
+ _attribute_map = {
+ 'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+ 'component_id': {'key': 'componentId', 'type': 'str'},
+ 'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
+ 'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_binding: Optional["ComputeBinding"] = None,
+ component_id: Optional[str] = None,
+ inputs: Optional[Dict[str, "ComponentJobInput"]] = None,
+ outputs: Optional[Dict[str, "ComponentJobOutput"]] = None,
+ **kwargs
+ ):
+ super(ComponentJob, self).__init__(**kwargs)
+ self.compute_binding = compute_binding
+ self.component_id = component_id
+ self.inputs = inputs
+ self.outputs = outputs
+
+
+class ComponentJobInput(msrest.serialization.Model):
+ """ComponentJobInput.
+
+ :param data: Input data definition.
+ :type data: ~azure_machine_learning_workspaces.models.InputData
+ :param input_binding: Reference to an output of another job's ComponentJobInput or reference to
+ a ComponentJobInput. Example "input2".
+ :type input_binding: str
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'InputData'},
+ 'input_binding': {'key': 'inputBinding', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data: Optional["InputData"] = None,
+ input_binding: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComponentJobInput, self).__init__(**kwargs)
+ self.data = data
+ self.input_binding = input_binding
+
+
+class ComponentJobOutput(msrest.serialization.Model):
+ """ComponentJobOutput.
+
+ :param data: Output data definition.
+ :type data: ~azure_machine_learning_workspaces.models.OutputData
+ :param output_binding: This is to pull the ComponentJobOutput from the overall PipelineOutputs.
+ Example "outputPath".
+ :type output_binding: str
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'OutputData'},
+ 'output_binding': {'key': 'outputBinding', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data: Optional["OutputData"] = None,
+ output_binding: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComponentJobOutput, self).__init__(**kwargs)
+ self.data = data
+ self.output_binding = output_binding
+
+
+class ComponentOutput(msrest.serialization.Model):
+ """ComponentOutput.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: Description for output.
+ :type description: str
+ :param data_type: Required. Component output type. String is used for type extensibility.
+ :type data_type: str
+ """
+
+ _validation = {
+ 'data_type': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_type: str,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComponentOutput, self).__init__(**kwargs)
+ self.description = description
+ self.data_type = data_type
+
+
+class ComponentVersionResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param environment_id: Environment configuration of the component.
+    :type environment_id: str
+    :param code_configuration: Required. Code configuration of the job. Includes CodeArtifactId and
+     Command.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param component: Component definition details.
+    :type component: ~azure_machine_learning_workspaces.models.Component
+    :param generated_by: If the name version are system generated (anonymous registration) or user
+     generated. Possible values include: "User", "System".
+    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # 'readonly' fields are server-populated; 'code_configuration' must be sent.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'code_configuration': {'required': True},
+    }
+
+    # Dotted wire keys ('properties.*') are flattened/unflattened by msrest,
+    # i.e. these attributes live inside a nested 'properties' JSON object.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'environment_id': {'key': 'properties.environmentId', 'type': 'str'},
+        'code_configuration': {'key': 'properties.codeConfiguration', 'type': 'CodeConfiguration'},
+        'component': {'key': 'properties.component', 'type': 'Component'},
+        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code_configuration: "CodeConfiguration",
+        environment_id: Optional[str] = None,
+        component: Optional["Component"] = None,
+        generated_by: Optional[Union[str, "AssetGenerator"]] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ComponentVersionResource, self).__init__(**kwargs)
+        # Read-only envelope fields start as None and are filled by the server.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.environment_id = environment_id
+        self.code_configuration = code_configuration
+        self.component = component
+        self.generated_by = generated_by
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+
+
+class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ComponentVersion entities.
+
+ :param value: An array of objects of type ComponentVersion.
+ :type value: list[~azure_machine_learning_workspaces.models.ComponentVersionResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComponentVersionResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ComponentVersionResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class ComputeBinding(msrest.serialization.Model):
+ """Compute binding definition.
+
+ :param compute_id: Resource ID of the compute resource.
+ :type compute_id: str
+ :param node_count: Number of nodes.
+ :type node_count: int
+ :param is_local: Set to true for jobs running on local compute.
+ :type is_local: bool
+ """
+
+ _attribute_map = {
+ 'compute_id': {'key': 'computeId', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'is_local': {'key': 'isLocal', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_id: Optional[str] = None,
+ node_count: Optional[int] = None,
+ is_local: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ComputeBinding, self).__init__(**kwargs)
+ self.compute_id = compute_id
+ self.node_count = node_count
+ self.is_local = is_local
+
+
+class ComputeInstance(Compute):
+    """An Azure Machine Learning compute instance.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param properties: Compute Instance properties.
+    :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+    """
+
+    # 'readonly' fields are populated by the service and ignored on requests.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # Maps Python attribute names to JSON wire names and msrest type strings.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_location: Optional[str] = None,
+        description: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        properties: Optional["ComputeInstanceProperties"] = None,
+        **kwargs
+    ):
+        super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+        # Pin the polymorphic discriminator for this Compute subtype.
+        self.compute_type = 'ComputeInstance'  # type: str
+        self.properties = properties
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application' endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ operation_name: Optional[Union[str, "OperationName"]] = None,
+ operation_time: Optional[datetime.datetime] = None,
+ operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = operation_name
+ self.operation_time = operation_time
+ self.operation_status = operation_status
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+    """Compute Instance properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param vm_size: Virtual Machine Size.
+    :type vm_size: str
+    :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+    :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+    :param application_sharing_policy: Policy for sharing applications on this compute instance
+     among users of parent workspace. If Personal, only the creator can access applications on this
+     compute instance. When Shared, any workspace user can access applications on this instance
+     depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+     value: "Shared".
+    :type application_sharing_policy: str or
+     ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+    :param ssh_settings: Specifies policy and settings for SSH access.
+    :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+    :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+     ComputeInstance.
+    :vartype connectivity_endpoints:
+     ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+    :ivar applications: Describes available applications and their endpoints on this
+     ComputeInstance.
+    :vartype applications:
+     list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+    :ivar created_by: Describes information on user who created this ComputeInstance.
+    :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+    :ivar errors: Collection of errors encountered on this ComputeInstance.
+    :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+     "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+     "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+    :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+     values are personal (default). Possible values include: "personal". Default value: "personal".
+    :type compute_instance_authorization_type: str or
+     ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+    :param personal_compute_instance_settings: Settings for a personal compute instance.
+    :type personal_compute_instance_settings:
+     ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+    :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+    :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+    :ivar last_operation: The last operation on ComputeInstance.
+    :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+    """
+
+    # Fields listed here are read-only: populated by the service on responses
+    # and stripped from request payloads by msrest.
+    _validation = {
+        'connectivity_endpoints': {'readonly': True},
+        'applications': {'readonly': True},
+        'created_by': {'readonly': True},
+        'errors': {'readonly': True},
+        'state': {'readonly': True},
+        'last_operation': {'readonly': True},
+    }
+
+    # Maps Python attribute names to JSON wire names and msrest type strings.
+    _attribute_map = {
+        'vm_size': {'key': 'vmSize', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+        'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+        'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+        'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+        'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+        'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+        'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+        'state': {'key': 'state', 'type': 'str'},
+        'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+        'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+        'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+        'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+    }
+
+    def __init__(
+        self,
+        *,
+        vm_size: Optional[str] = None,
+        subnet: Optional["ResourceId"] = None,
+        application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+        ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+        compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal",
+        personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None,
+        setup_scripts: Optional["SetupScripts"] = None,
+        **kwargs
+    ):
+        super(ComputeInstanceProperties, self).__init__(**kwargs)
+        self.vm_size = vm_size
+        self.subnet = subnet
+        self.application_sharing_policy = application_sharing_policy
+        self.ssh_settings = ssh_settings
+        # Read-only attributes start as None and are filled by the server.
+        self.connectivity_endpoints = None
+        self.applications = None
+        self.created_by = None
+        self.errors = None
+        self.state = None
+        self.compute_instance_authorization_type = compute_instance_authorization_type
+        self.personal_compute_instance_settings = personal_compute_instance_settings
+        self.setup_scripts = setup_scripts
+        self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+    """Specifies policy and settings for SSH access.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+     Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+     public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+     Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+    :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+    :ivar admin_user_name: Describes the admin user name.
+    :vartype admin_user_name: str
+    :ivar ssh_port: Describes the port for connecting through SSH.
+    :vartype ssh_port: int
+    :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+     rsa -b 2048" to generate your SSH key pairs.
+    :type admin_public_key: str
+    """
+
+    # Read-only fields populated by the service.
+    _validation = {
+        'admin_user_name': {'readonly': True},
+        'ssh_port': {'readonly': True},
+    }
+
+    # Maps Python attribute names to JSON wire names and msrest type strings.
+    _attribute_map = {
+        'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+        'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+        'ssh_port': {'key': 'sshPort', 'type': 'int'},
+        'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled",
+        admin_public_key: Optional[str] = None,
+        **kwargs
+    ):
+        super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+        self.ssh_public_access = ssh_public_access
+        # Server-populated attributes start as None.
+        self.admin_user_name = None
+        self.ssh_port = None
+        self.admin_public_key = admin_public_key
+
+
+class Resource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar name: Specifies the name of the resource.
+    :vartype name: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :ivar type: Specifies the type of the resource.
+    :vartype type: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    """
+
+    # 'id', 'name' and 'type' are assigned by ARM and read-only.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    # Maps Python attribute names to JSON wire names and msrest type strings.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        **kwargs
+    ):
+        super(Resource, self).__init__(**kwargs)
+        # Read-only envelope fields start as None and are filled by the server.
+        self.id = None
+        self.name = None
+        self.identity = identity
+        self.location = location
+        self.type = None
+        self.tags = tags
+        self.sku = sku
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.properties = properties
+
+
+class ContainerRegistry(msrest.serialization.Model):
+ """ContainerRegistry.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(ContainerRegistry, self).__init__(**kwargs)
+ self.address = address
+ self.username = username
+ self.password = password
+
+
+class ContainerRegistryResponse(msrest.serialization.Model):
+ """ContainerRegistryResponse.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ **kwargs
+ ):
+ super(ContainerRegistryResponse, self).__init__(**kwargs)
+ self.address = address
+
+
class ContainerResourceRequirements(msrest.serialization.Model):
    """Resource requirements (cpu, memory, gpu, fpga) for a container.

    :param cpu: The number of CPU cores on the container.
    :type cpu: float
    :param memory_in_gb: The amount of memory on the container in GB.
    :type memory_in_gb: float
    :param gpu: The number of GPU cores in the container.
    :type gpu: int
    :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
    :type fpga: int
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'cpu': {'key': 'cpu', 'type': 'float'},
        'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
        'gpu': {'key': 'gpu', 'type': 'int'},
        'fpga': {'key': 'fpga', 'type': 'int'},
    }

    def __init__(self, *, cpu: Optional[float] = None, memory_in_gb: Optional[float] = None,
                 gpu: Optional[int] = None, fpga: Optional[int] = None, **kwargs):
        super().__init__(**kwargs)
        self.fpga = fpga
        self.gpu = gpu
        self.memory_in_gb = memory_in_gb
        self.cpu = cpu
+
+
class EnvironmentImageRequest(msrest.serialization.Model):
    """Request to create a Docker image based on Environment.

    :param driver_program: The name of the driver file.
    :type driver_program: str
    :param assets: The list of assets.
    :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
    :param model_ids: The list of model Ids.
    :type model_ids: list[str]
    :param models: The list of models.
    :type models: list[~azure_machine_learning_workspaces.models.Model]
    :param environment: The details of the AZURE ML environment.
    :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
    :param environment_reference: The unique identifying details of the AZURE ML environment.
    :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'driver_program': {'key': 'driverProgram', 'type': 'str'},
        'assets': {'key': 'assets', 'type': '[ImageAsset]'},
        'model_ids': {'key': 'modelIds', 'type': '[str]'},
        'models': {'key': 'models', 'type': '[Model]'},
        'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
        'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
    }

    def __init__(self, *, driver_program: Optional[str] = None,
                 assets: Optional[List["ImageAsset"]] = None,
                 model_ids: Optional[List[str]] = None,
                 models: Optional[List["Model"]] = None,
                 environment: Optional["ModelEnvironmentDefinition"] = None,
                 environment_reference: Optional["EnvironmentReference"] = None, **kwargs):
        super().__init__(**kwargs)
        self.environment_reference = environment_reference
        self.environment = environment
        self.models = models
        self.model_ids = model_ids
        self.assets = assets
        self.driver_program = driver_program
+
+
class CreateServiceRequestEnvironmentImageRequest(EnvironmentImageRequest):
    """The Environment, models and assets needed for inferencing.

    :param driver_program: The name of the driver file.
    :type driver_program: str
    :param assets: The list of assets.
    :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
    :param model_ids: The list of model Ids.
    :type model_ids: list[str]
    :param models: The list of models.
    :type models: list[~azure_machine_learning_workspaces.models.Model]
    :param environment: The details of the AZURE ML environment.
    :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
    :param environment_reference: The unique identifying details of the AZURE ML environment.
    :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
    """

    # Same wire shape as the parent model; the map is restated per generated-code convention.
    _attribute_map = {
        'driver_program': {'key': 'driverProgram', 'type': 'str'},
        'assets': {'key': 'assets', 'type': '[ImageAsset]'},
        'model_ids': {'key': 'modelIds', 'type': '[str]'},
        'models': {'key': 'models', 'type': '[Model]'},
        'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
        'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
    }

    def __init__(self, *, driver_program: Optional[str] = None,
                 assets: Optional[List["ImageAsset"]] = None,
                 model_ids: Optional[List[str]] = None,
                 models: Optional[List["Model"]] = None,
                 environment: Optional["ModelEnvironmentDefinition"] = None,
                 environment_reference: Optional["EnvironmentReference"] = None, **kwargs):
        # All state lives on the parent; simply forward everything.
        super().__init__(driver_program=driver_program, assets=assets, model_ids=model_ids,
                         models=models, environment=environment,
                         environment_reference=environment_reference, **kwargs)
+
+
class CreateServiceRequestKeys(AuthKeys):
    """The authentication keys.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    # Same wire shape as AuthKeys; the map is restated per generated-code convention.
    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(self, *, primary_key: Optional[str] = None,
                 secondary_key: Optional[str] = None, **kwargs):
        # All state lives on the parent; simply forward everything.
        super().__init__(primary_key=primary_key, secondary_key=secondary_key, **kwargs)
+
+
class CsvExportSummary(ExportSummary):
    """Export summary for labels exported in CSV format.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar snapshot_path: The output path where the labels will be exported.
    :vartype snapshot_path: str
    :ivar container_name: The container name to which the labels will be exported.
    :vartype container_name: str
    """

    _validation = {
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'start_time_utc': {'readonly': True},
        'end_time_utc': {'readonly': True},
        'snapshot_path': {'readonly': True},
        'container_name': {'readonly': True},
    }

    _attribute_map = {
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value for polymorphic deserialization.
        self.format = 'CSV'  # type: str
        # Server-populated (readonly) fields start out unset.
        self.snapshot_path = None
        self.container_name = None
+
+
class DataBinding(msrest.serialization.Model):
    """Data binding definition.

    :param source_data_reference: Reference to source data artifact.
    :type source_data_reference: str
    :param local_reference: Location of data inside the container process.
    :type local_reference: str
    :param mode: Mechanism for accessing the data artifact. Possible values include: "Mount",
     "Download", "Upload".
    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'source_data_reference': {'key': 'sourceDataReference', 'type': 'str'},
        'local_reference': {'key': 'localReference', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
    }

    def __init__(self, *, source_data_reference: Optional[str] = None,
                 local_reference: Optional[str] = None,
                 mode: Optional[Union[str, "DataBindingMode"]] = None, **kwargs):
        super().__init__(**kwargs)
        self.mode = mode
        self.local_reference = local_reference
        self.source_data_reference = source_data_reference
+
+
class Databricks(Compute):
    """A Databricks compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        properties: Optional["DatabricksProperties"] = None,
        **kwargs
    ):
        super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
        # Discriminator value for polymorphic deserialization.
        self.compute_type = 'Databricks'  # type: str
        self.properties = properties
+
+
class DatabricksComputeSecrets(ComputeSecrets):
    """Secrets related to a Machine Learning compute based on Databricks.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param databricks_access_token: access token for databricks account.
    :type databricks_access_token: str
    """

    _validation = {
        'compute_type': {'required': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
    }

    def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value for polymorphic deserialization.
        self.compute_type = 'Databricks'  # type: str
        self.databricks_access_token = databricks_access_token
+
+
class DatabricksProperties(msrest.serialization.Model):
    """DatabricksProperties model.

    :param databricks_access_token: Databricks access token.
    :type databricks_access_token: str
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
    }

    def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.databricks_access_token = databricks_access_token
+
+
class DataContainerResource(msrest.serialization.Model):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :param properties: Dictionary of :code:``.
    :type properties: dict[str, str]
    :param tags: A set of tags. Dictionary of :code:``.
    :type tags: dict[str, str]
    :param description:
    :type description: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
    }

    # Nested 'properties.*' keys flatten the ARM properties bag onto this model.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
        'tags': {'key': 'properties.tags', 'type': '{str}'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(self, *, properties: Optional[Dict[str, str]] = None,
                 tags: Optional[Dict[str, str]] = None,
                 description: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        # Server-populated (readonly) fields start out unset.
        self.id = None
        self.name = None
        self.type = None
        self.system_data = None
        # Caller-supplied fields.
        self.description = description
        self.tags = tags
        self.properties = properties
+
+
class DataContainerResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of DataContainer entities.

    :param value: An array of objects of type DataContainer.
    :type value: list[~azure_machine_learning_workspaces.models.DataContainerResource]
    :param next_link:
    :type next_link: str
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[DataContainerResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: Optional[List["DataContainerResource"]] = None,
                 next_link: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class DataFactory(Compute):
    """A DataFactory compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
    }

    def __init__(self, *, compute_location: Optional[str] = None,
                 description: Optional[str] = None,
                 resource_id: Optional[str] = None, **kwargs):
        super().__init__(compute_location=compute_location, description=description,
                         resource_id=resource_id, **kwargs)
        # Discriminator value for polymorphic deserialization.
        self.compute_type = 'DataFactory'  # type: str
+
+
class DataLakeAnalytics(Compute):
    """A DataLakeAnalytics compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
    }

    def __init__(self, *, compute_location: Optional[str] = None,
                 description: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 properties: Optional["DataLakeAnalyticsProperties"] = None, **kwargs):
        super().__init__(compute_location=compute_location, description=description,
                         resource_id=resource_id, **kwargs)
        # Discriminator value for polymorphic deserialization.
        self.compute_type = 'DataLakeAnalytics'  # type: str
        self.properties = properties
+
+
class DataLakeAnalyticsProperties(msrest.serialization.Model):
    """DataLakeAnalyticsProperties model.

    :param data_lake_store_account_name: DataLake Store Account Name.
    :type data_lake_store_account_name: str
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
    }

    def __init__(self, *, data_lake_store_account_name: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.data_lake_store_account_name = data_lake_store_account_name
+
+
class DataPathAssetReference(AssetReferenceBase):
    """Asset reference addressed by a datastore path.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
     server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param path:
    :type path: str
    :param datastore_id:
    :type datastore_id: str
    """

    _validation = {
        'reference_type': {'required': True},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
    }

    def __init__(self, *, path: Optional[str] = None,
                 datastore_id: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value for polymorphic deserialization.
        self.reference_type = 'DataPath'  # type: str
        self.datastore_id = datastore_id
        self.path = path
+
+
class DatasetExportSummary(ExportSummary):
    """Export summary for labels exported as a Dataset.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar labeled_asset_name: The unique name of the labeled data asset.
    :vartype labeled_asset_name: str
    """

    _validation = {
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'start_time_utc': {'readonly': True},
        'end_time_utc': {'readonly': True},
        'labeled_asset_name': {'readonly': True},
    }

    _attribute_map = {
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value for polymorphic deserialization.
        self.format = 'Dataset'  # type: str
        # Server-populated (readonly) field starts out unset.
        self.labeled_asset_name = None
+
+
class DatasetReference(msrest.serialization.Model):
    """The dataset reference object.

    :param name: The name of the dataset reference.
    :type name: str
    :param id: The id of the dataset reference.
    :type id: str
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
    }

    # NOTE: the parameter name `id` shadows the builtin, but it is part of the
    # generated public interface and must be kept.
    def __init__(self, *, name: Optional[str] = None, id: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.id = id
        self.name = name
+
+
class DataSettings(msrest.serialization.Model):
    """This class represents the Dataset Json that is passed into Jasmine for training.

    :param training_data: The training_data.
    :type training_data: ~azure_machine_learning_workspaces.models.TrainingDataSettings
    :param validation_data: The validation_data.
    :type validation_data: ~azure_machine_learning_workspaces.models.ValidationDataSettings
    """

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'training_data': {'key': 'trainingData', 'type': 'TrainingDataSettings'},
        'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
    }

    def __init__(self, *, training_data: Optional["TrainingDataSettings"] = None,
                 validation_data: Optional["ValidationDataSettings"] = None, **kwargs):
        super().__init__(**kwargs)
        self.validation_data = validation_data
        self.training_data = training_data
+
+
class DatastoreContents(msrest.serialization.Model):
    """Storage contents backing a datastore.

    All required parameters must be populated in order to send to Azure.

    :param datastore_contents_type: Required. Storage type backing the datastore. Possible values
     include: "AzureBlob", "AzureDataLake", "AzureDataLakeGen2", "AzureFile", "AzureMySql",
     "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
    :type datastore_contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
    :param azure_data_lake: Azure Data Lake (Gen1/2) storage information.
    :type azure_data_lake: ~azure_machine_learning_workspaces.models.AzureDataLakeSection
    :param azure_my_sql: Azure Database for MySQL information.
    :type azure_my_sql: ~azure_machine_learning_workspaces.models.AzureMySqlSection
    :param azure_postgre_sql: Azure Database for PostgreSQL information.
    :type azure_postgre_sql: ~azure_machine_learning_workspaces.models.AzurePostgreSqlSection
    :param azure_sql_database: Azure SQL Database information.
    :type azure_sql_database: ~azure_machine_learning_workspaces.models.AzureSqlDatabaseSection
    :param azure_storage: Azure storage account (blobs, files) information.
    :type azure_storage: ~azure_machine_learning_workspaces.models.AzureStorageSection
    :param gluster_fs: GlusterFS volume information.
    :type gluster_fs: ~azure_machine_learning_workspaces.models.GlusterFsSection
    """

    _validation = {
        'datastore_contents_type': {'required': True},
    }

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'datastore_contents_type': {'key': 'datastoreContentsType', 'type': 'str'},
        'azure_data_lake': {'key': 'azureDataLake', 'type': 'AzureDataLakeSection'},
        'azure_my_sql': {'key': 'azureMySql', 'type': 'AzureMySqlSection'},
        'azure_postgre_sql': {'key': 'azurePostgreSql', 'type': 'AzurePostgreSqlSection'},
        'azure_sql_database': {'key': 'azureSqlDatabase', 'type': 'AzureSqlDatabaseSection'},
        'azure_storage': {'key': 'azureStorage', 'type': 'AzureStorageSection'},
        'gluster_fs': {'key': 'glusterFs', 'type': 'GlusterFsSection'},
    }

    def __init__(self, *, datastore_contents_type: Union[str, "ContentsType"],
                 azure_data_lake: Optional["AzureDataLakeSection"] = None,
                 azure_my_sql: Optional["AzureMySqlSection"] = None,
                 azure_postgre_sql: Optional["AzurePostgreSqlSection"] = None,
                 azure_sql_database: Optional["AzureSqlDatabaseSection"] = None,
                 azure_storage: Optional["AzureStorageSection"] = None,
                 gluster_fs: Optional["GlusterFsSection"] = None, **kwargs):
        super().__init__(**kwargs)
        # The required type tag selects which of the optional sections applies.
        self.datastore_contents_type = datastore_contents_type
        self.gluster_fs = gluster_fs
        self.azure_storage = azure_storage
        self.azure_sql_database = azure_sql_database
        self.azure_postgre_sql = azure_postgre_sql
        self.azure_my_sql = azure_my_sql
        self.azure_data_lake = azure_data_lake
+
+
class DatastoreCredentials(msrest.serialization.Model):
    """Credentials used to authenticate with datastore storage.

    All required parameters must be populated in order to send to Azure.

    :param datastore_credentials_type: Required. Credential type used to authentication with
     storage. Possible values include: "AccountKey", "Certificate", "None", "Sas",
     "ServicePrincipal", "SqlAdmin".
    :type datastore_credentials_type: str or
     ~azure_machine_learning_workspaces.models.CredentialsType
    :param account_key: Storage account key authentication.
    :type account_key: ~azure_machine_learning_workspaces.models.AccountKeySection
    :param certificate: Service principal certificate authentication.
    :type certificate: ~azure_machine_learning_workspaces.models.CertificateSection
    :param sas: Storage container SAS token authentication.
    :type sas: ~azure_machine_learning_workspaces.models.SasSection
    :param service_principal: Service principal password authentication.
    :type service_principal: ~azure_machine_learning_workspaces.models.ServicePrincipalSection
    :param sql_admin: SQL user/password authentication.
    :type sql_admin: ~azure_machine_learning_workspaces.models.SqlAdminSection
    """

    _validation = {
        'datastore_credentials_type': {'required': True},
    }

    # msrest drives JSON (de)serialization from this map.
    _attribute_map = {
        'datastore_credentials_type': {'key': 'datastoreCredentialsType', 'type': 'str'},
        'account_key': {'key': 'accountKey', 'type': 'AccountKeySection'},
        'certificate': {'key': 'certificate', 'type': 'CertificateSection'},
        'sas': {'key': 'sas', 'type': 'SasSection'},
        'service_principal': {'key': 'servicePrincipal', 'type': 'ServicePrincipalSection'},
        'sql_admin': {'key': 'sqlAdmin', 'type': 'SqlAdminSection'},
    }

    def __init__(self, *, datastore_credentials_type: Union[str, "CredentialsType"],
                 account_key: Optional["AccountKeySection"] = None,
                 certificate: Optional["CertificateSection"] = None,
                 sas: Optional["SasSection"] = None,
                 service_principal: Optional["ServicePrincipalSection"] = None,
                 sql_admin: Optional["SqlAdminSection"] = None, **kwargs):
        super().__init__(**kwargs)
        # The required type tag selects which of the optional sections applies.
        self.datastore_credentials_type = datastore_credentials_type
        self.sql_admin = sql_admin
        self.service_principal = service_principal
        self.sas = sas
        self.certificate = certificate
        self.account_key = account_key
+
+
+class DatastorePropertiesResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param contents: Required. Reference to the datastore storage contents.
+    :type contents: ~azure_machine_learning_workspaces.models.DatastoreContents
+    :ivar has_been_validated: Whether the service has validated access to the datastore with the
+     provided credentials.
+    :vartype has_been_validated: bool
+    :param is_default: Whether this datastore is the default for the workspace.
+    :type is_default: bool
+    :param linked_info: Information about the datastore origin, if linked.
+    :type linked_info: ~azure_machine_learning_workspaces.models.LinkedInfo
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'contents': {'required': True},
+        'has_been_validated': {'readonly': True},
+    }
+
+    # Keys of the form 'properties.x' are flattened by msrest into the ARM
+    # 'properties' sub-object during (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'contents': {'key': 'properties.contents', 'type': 'DatastoreContents'},
+        'has_been_validated': {'key': 'properties.hasBeenValidated', 'type': 'bool'},
+        'is_default': {'key': 'properties.isDefault', 'type': 'bool'},
+        'linked_info': {'key': 'properties.linkedInfo', 'type': 'LinkedInfo'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        contents: "DatastoreContents",
+        is_default: Optional[bool] = None,
+        linked_info: Optional["LinkedInfo"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(DatastorePropertiesResource, self).__init__(**kwargs)
+        # Read-only fields are server-populated; initialized to None on the client.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.contents = contents
+        self.has_been_validated = None
+        self.is_default = is_default
+        self.linked_info = linked_info
+        self.properties = properties
+        self.description = description
+        self.tags = tags
+
+class DatastorePropertiesResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of DatastoreProperties entities.
+
+    :param value: An array of objects of type DatastoreProperties.
+    :type value: list[~azure_machine_learning_workspaces.models.DatastorePropertiesResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DatastorePropertiesResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DatastorePropertiesResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(DatastorePropertiesResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+class DataVersionResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param dataset_type: The Format of dataset. Possible values include: "Simple", "Dataflow".
+    :type dataset_type: str or ~azure_machine_learning_workspaces.models.DatasetType
+    :param datastore_id: The asset datastoreId.
+    :type datastore_id: str
+    :param asset_path: DEPRECATED - use
+     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
+    :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
+    :param path: The path of the file/directory.
+    :type path: str
+    :param generated_by: Whether the name/version is system generated (anonymous registration) or
+     user generated. Possible values include: "User", "System".
+    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'dataset_type': {'key': 'properties.datasetType', 'type': 'str'},
+        'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
+        'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
+        'path': {'key': 'properties.path', 'type': 'str'},
+        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        dataset_type: Optional[Union[str, "DatasetType"]] = None,
+        datastore_id: Optional[str] = None,
+        asset_path: Optional["AssetPath"] = None,
+        path: Optional[str] = None,
+        generated_by: Optional[Union[str, "AssetGenerator"]] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(DataVersionResource, self).__init__(**kwargs)
+        # Read-only fields are server-populated; initialized to None on the client.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.dataset_type = dataset_type
+        self.datastore_id = datastore_id
+        self.asset_path = asset_path
+        self.path = path
+        self.generated_by = generated_by
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+
+class DataVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of DataVersion entities.
+
+    :param value: An array of objects of type DataVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.DataVersionResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DataVersionResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DataVersionResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(DataVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+class DeploymentLogs(msrest.serialization.Model):
+    """DeploymentLogs.
+
+    :param content: The content of the retrieved deployment logs.
+    :type content: str
+    """
+
+    _attribute_map = {
+        'content': {'key': 'content', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        content: Optional[str] = None,
+        **kwargs
+    ):
+        super(DeploymentLogs, self).__init__(**kwargs)
+        self.content = content
+
+class DeploymentLogsRequest(msrest.serialization.Model):
+    """Request parameters for retrieving deployment logs.
+
+    :param container_type: The type of container to retrieve logs from. Possible values include:
+     "StorageInitializer", "InferenceServer".
+    :type container_type: str or ~azure_machine_learning_workspaces.models.ContainerType
+    :param tail: The maximum number of lines to tail.
+    :type tail: int
+    """
+
+    _attribute_map = {
+        'container_type': {'key': 'containerType', 'type': 'str'},
+        'tail': {'key': 'tail', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        container_type: Optional[Union[str, "ContainerType"]] = None,
+        tail: Optional[int] = None,
+        **kwargs
+    ):
+        super(DeploymentLogsRequest, self).__init__(**kwargs)
+        self.container_type = container_type
+        self.tail = tail
+
+class DistributionConfiguration(msrest.serialization.Model):
+    """Base class for distributed-training framework configuration.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: Mpi, PyTorch, TensorFlow.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework. Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    """
+
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+    }
+
+    # msrest uses this map to deserialize the payload into the matching sub-class
+    # based on the 'distributionType' discriminator value.
+    _subtype_map = {
+        'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DistributionConfiguration, self).__init__(**kwargs)
+        self.distribution_type = None  # type: Optional[str]
+
+class DockerSpecification(msrest.serialization.Model):
+    """Class to represent configuration settings for Docker.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: DockerBuild, DockerImage.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image. Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    """
+
+    _validation = {
+        'docker_specification_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+    }
+
+    # msrest uses this map to deserialize the payload into the matching sub-class
+    # based on the 'dockerSpecificationType' discriminator value.
+    _subtype_map = {
+        'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'}
+    }
+
+    def __init__(
+        self,
+        *,
+        platform: Optional["DockerImagePlatform"] = None,
+        **kwargs
+    ):
+        super(DockerSpecification, self).__init__(**kwargs)
+        self.docker_specification_type = None  # type: Optional[str]
+        self.platform = platform
+
+class DockerBuild(DockerSpecification):
+    """Class to represent configuration settings for Docker Build.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image. Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    :param dockerfile: Required. Docker command line instructions to assemble an image. Must
+     contain at least one word character (see the validation pattern).
+    :type dockerfile: str
+    :param context: Path to a snapshot of the Docker Context. This property is only valid if
+     Dockerfile is specified.
+     The path is relative to the asset path which must contain a single Blob URI value.
+    :type context: str
+    """
+
+    _validation = {
+        'docker_specification_type': {'required': True},
+        'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+        'dockerfile': {'key': 'dockerfile', 'type': 'str'},
+        'context': {'key': 'context', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        dockerfile: str,
+        platform: Optional["DockerImagePlatform"] = None,
+        context: Optional[str] = None,
+        **kwargs
+    ):
+        super(DockerBuild, self).__init__(platform=platform, **kwargs)
+        # Fixed discriminator value for this sub-class.
+        self.docker_specification_type = 'Build'  # type: str
+        self.dockerfile = dockerfile
+        self.context = context
+
+class DockerImage(DockerSpecification):
+    """Class to represent configuration settings for Docker Image.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param docker_specification_type: Required. Docker specification must be either Build or
+     Image. Constant filled by server. Possible values include: "Build", "Image".
+    :type docker_specification_type: str or
+     ~azure_machine_learning_workspaces.models.DockerSpecificationType
+    :param platform: The platform information of the docker image.
+    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
+    :param docker_image_uri: Required. Image name of a custom base image. Must contain at least
+     one word character (see the validation pattern).
+    :type docker_image_uri: str
+    """
+
+    _validation = {
+        'docker_specification_type': {'required': True},
+        'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    _attribute_map = {
+        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
+        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
+        'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        docker_image_uri: str,
+        platform: Optional["DockerImagePlatform"] = None,
+        **kwargs
+    ):
+        super(DockerImage, self).__init__(platform=platform, **kwargs)
+        # Fixed discriminator value for this sub-class.
+        self.docker_specification_type = 'Image'  # type: str
+        self.docker_image_uri = docker_image_uri
+
+class DockerImagePlatform(msrest.serialization.Model):
+    """DockerImagePlatform.
+
+    :param operating_system_type: The OS type of the Environment. Possible values include:
+     "Linux", "Windows".
+    :type operating_system_type: str or
+     ~azure_machine_learning_workspaces.models.OperatingSystemType
+    """
+
+    _attribute_map = {
+        'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        operating_system_type: Optional[Union[str, "OperatingSystemType"]] = None,
+        **kwargs
+    ):
+        super(DockerImagePlatform, self).__init__(**kwargs)
+        self.operating_system_type = operating_system_type
+
+class EncryptionProperty(msrest.serialization.Model):
+    """Encryption settings for the workspace, backed by a customer Key Vault key.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+     Possible values include: "Enabled", "Disabled".
+    :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+    :param key_vault_properties: Required. Customer Key vault properties.
+    :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+    """
+
+    _validation = {
+        'status': {'required': True},
+        'key_vault_properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        status: Union[str, "EncryptionStatus"],
+        key_vault_properties: "KeyVaultProperties",
+        **kwargs
+    ):
+        super(EncryptionProperty, self).__init__(**kwargs)
+        self.status = status
+        self.key_vault_properties = key_vault_properties
+
+class EndpointAuthKeys(msrest.serialization.Model):
+    """Key pair used for endpoint authentication.
+
+    :param primary_key: The primary key.
+    :type primary_key: str
+    :param secondary_key: The secondary key.
+    :type secondary_key: str
+    """
+
+    _attribute_map = {
+        'primary_key': {'key': 'primaryKey', 'type': 'str'},
+        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        primary_key: Optional[str] = None,
+        secondary_key: Optional[str] = None,
+        **kwargs
+    ):
+        super(EndpointAuthKeys, self).__init__(**kwargs)
+        self.primary_key = primary_key
+        self.secondary_key = secondary_key
+
+class EndpointAuthToken(msrest.serialization.Model):
+    """Service Token.
+
+    :param access_token: Access token.
+    :type access_token: str
+    :param token_type: Access token type.
+    :type token_type: str
+    :param expiry_time_utc: Access token expiry time (UTC).
+    :type expiry_time_utc: long
+    :param refresh_after_time_utc: Refresh access token after time (UTC).
+    :type refresh_after_time_utc: long
+    """
+
+    # NOTE(review): expiry/refresh times are serialized as 64-bit integers and
+    # presumably represent epoch timestamps — confirm units against the service.
+    _attribute_map = {
+        'access_token': {'key': 'accessToken', 'type': 'str'},
+        'token_type': {'key': 'tokenType', 'type': 'str'},
+        'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'},
+        'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'},
+    }
+
+    def __init__(
+        self,
+        *,
+        access_token: Optional[str] = None,
+        token_type: Optional[str] = None,
+        expiry_time_utc: Optional[int] = None,
+        refresh_after_time_utc: Optional[int] = None,
+        **kwargs
+    ):
+        super(EndpointAuthToken, self).__init__(**kwargs)
+        self.access_token = access_token
+        self.token_type = token_type
+        self.expiry_time_utc = expiry_time_utc
+        self.refresh_after_time_utc = refresh_after_time_utc
+
+class EnvironmentContainerResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param description: The asset description text.
+    :type description: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        description: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentContainerResource, self).__init__(**kwargs)
+        # Read-only fields are server-populated; initialized to None on the client.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.properties = properties
+        self.tags = tags
+        self.description = description
+
+class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of EnvironmentContainer entities.
+
+    :param value: An array of objects of type EnvironmentContainer.
+    :type value: list[~azure_machine_learning_workspaces.models.EnvironmentContainerResource]
+    :param next_link: The link to the next page of results, if any.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[EnvironmentContainerResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["EnvironmentContainerResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+class ModelEnvironmentDefinition(msrest.serialization.Model):
+    """ModelEnvironmentDefinition.
+
+    :param name: The name of the environment.
+    :type name: str
+    :param version: The environment version.
+    :type version: str
+    :param python: Settings for a Python environment.
+    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+    :param environment_variables: Definition of environment variables to be defined in the
+     environment.
+    :type environment_variables: dict[str, str]
+    :param docker: The definition of a Docker container.
+    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+    :param spark: The configuration for a Spark environment.
+    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+    :param r: Settings for an R environment.
+    :type r: ~azure_machine_learning_workspaces.models.RSection
+    :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+     adding an inferencing stack, do not set this value. Valid values: "latest".
+    :type inferencing_stack_version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'python': {'key': 'python', 'type': 'ModelPythonSection'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+        'r': {'key': 'r', 'type': 'RSection'},
+        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        python: Optional["ModelPythonSection"] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        docker: Optional["ModelDockerSection"] = None,
+        spark: Optional["ModelSparkSection"] = None,
+        r: Optional["RSection"] = None,
+        inferencing_stack_version: Optional[str] = None,
+        **kwargs
+    ):
+        super(ModelEnvironmentDefinition, self).__init__(**kwargs)
+        self.name = name
+        self.version = version
+        self.python = python
+        self.environment_variables = environment_variables
+        self.docker = docker
+        self.spark = spark
+        self.r = r
+        self.inferencing_stack_version = inferencing_stack_version
+
+class EnvironmentImageRequestEnvironment(ModelEnvironmentDefinition):
+    """The details of the Azure ML environment.
+
+    :param name: The name of the environment.
+    :type name: str
+    :param version: The environment version.
+    :type version: str
+    :param python: Settings for a Python environment.
+    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+    :param environment_variables: Definition of environment variables to be defined in the
+     environment.
+    :type environment_variables: dict[str, str]
+    :param docker: The definition of a Docker container.
+    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+    :param spark: The configuration for a Spark environment.
+    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+    :param r: Settings for an R environment.
+    :type r: ~azure_machine_learning_workspaces.models.RSection
+    :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+     adding an inferencing stack, do not set this value. Valid values: "latest".
+    :type inferencing_stack_version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'python': {'key': 'python', 'type': 'ModelPythonSection'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+        'r': {'key': 'r', 'type': 'RSection'},
+        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        python: Optional["ModelPythonSection"] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        docker: Optional["ModelDockerSection"] = None,
+        spark: Optional["ModelSparkSection"] = None,
+        r: Optional["RSection"] = None,
+        inferencing_stack_version: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentImageRequestEnvironment, self).__init__(name=name, version=version, python=python, environment_variables=environment_variables, docker=docker, spark=spark, r=r, inferencing_stack_version=inferencing_stack_version, **kwargs)
+
+class EnvironmentReference(msrest.serialization.Model):
+    """Reference to an environment by name and version.
+
+    :param name: Name of the environment.
+    :type name: str
+    :param version: Version of the environment.
+    :type version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentReference, self).__init__(**kwargs)
+        self.name = name
+        self.version = version
+
+class EnvironmentImageRequestEnvironmentReference(EnvironmentReference):
+    """The unique identifying details of the Azure ML environment.
+
+    :param name: Name of the environment.
+    :type name: str
+    :param version: Version of the environment.
+    :type version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentImageRequestEnvironmentReference, self).__init__(name=name, version=version, **kwargs)
+
+class ModelEnvironmentDefinitionResponse(msrest.serialization.Model):
+    """ModelEnvironmentDefinitionResponse.
+
+    :param name: The name of the environment.
+    :type name: str
+    :param version: The environment version.
+    :type version: str
+    :param python: Settings for a Python environment.
+    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+    :param environment_variables: Definition of environment variables to be defined in the
+     environment.
+    :type environment_variables: dict[str, str]
+    :param docker: The definition of a Docker container.
+    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+    :param spark: The configuration for a Spark environment.
+    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+    :param r: Settings for an R environment.
+    :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+    :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+     adding an inferencing stack, do not set this value. Valid values: "latest".
+    :type inferencing_stack_version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'python': {'key': 'python', 'type': 'ModelPythonSection'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+        'r': {'key': 'r', 'type': 'RSectionResponse'},
+        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        python: Optional["ModelPythonSection"] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        docker: Optional["ModelDockerSectionResponse"] = None,
+        spark: Optional["ModelSparkSection"] = None,
+        r: Optional["RSectionResponse"] = None,
+        inferencing_stack_version: Optional[str] = None,
+        **kwargs
+    ):
+        super(ModelEnvironmentDefinitionResponse, self).__init__(**kwargs)
+        self.name = name
+        self.version = version
+        self.python = python
+        self.environment_variables = environment_variables
+        self.docker = docker
+        self.spark = spark
+        self.r = r
+        self.inferencing_stack_version = inferencing_stack_version
+
+class EnvironmentImageResponseEnvironment(ModelEnvironmentDefinitionResponse):
+    """The details of the Azure ML environment.
+
+    :param name: The name of the environment.
+    :type name: str
+    :param version: The environment version.
+    :type version: str
+    :param python: Settings for a Python environment.
+    :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+    :param environment_variables: Definition of environment variables to be defined in the
+     environment.
+    :type environment_variables: dict[str, str]
+    :param docker: The definition of a Docker container.
+    :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+    :param spark: The configuration for a Spark environment.
+    :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+    :param r: Settings for an R environment.
+    :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+    :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+     adding an inferencing stack, do not set this value. Valid values: "latest".
+    :type inferencing_stack_version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'python': {'key': 'python', 'type': 'ModelPythonSection'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+        'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+        'r': {'key': 'r', 'type': 'RSectionResponse'},
+        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        python: Optional["ModelPythonSection"] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        docker: Optional["ModelDockerSectionResponse"] = None,
+        spark: Optional["ModelSparkSection"] = None,
+        r: Optional["RSectionResponse"] = None,
+        inferencing_stack_version: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentImageResponseEnvironment, self).__init__(name=name, version=version, python=python, environment_variables=environment_variables, docker=docker, spark=spark, r=r, inferencing_stack_version=inferencing_stack_version, **kwargs)
+
+class EnvironmentImageResponseEnvironmentReference(EnvironmentReference):
+    """The unique identifying details of the Azure ML environment.
+
+    :param name: Name of the environment.
+    :type name: str
+    :param version: Version of the environment.
+    :type version: str
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        **kwargs
+    ):
+        super(EnvironmentImageResponseEnvironmentReference, self).__init__(name=name, version=version, **kwargs)
+
+class EnvironmentSpecificationVersionResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar environment_specification_type: Environment specification is either user managed or
+ curated by the Azure ML service
+
+
+ .. raw:: html
+
+ . Possible values include: "Curated", "UserCreated".
+ :vartype environment_specification_type: str or
+ ~azure_machine_learning_workspaces.models.EnvironmentSpecificationType
+ :param docker: Class to represent configuration settings for Docker.
+ :type docker: ~azure_machine_learning_workspaces.models.DockerSpecification
+ :param conda_file: Standard configuration file used by conda that lets you install any kind of
+ package, including Python, R, and C/C++ packages
+
+
+ .. raw:: html
+
+ .
+ :type conda_file: str
+ :param inference_container_properties: Defines configuration specific to inference.
+ :type inference_container_properties:
+ ~azure_machine_learning_workspaces.models.InferenceContainerProperties
+ :param generated_by: If the name version are system generated (anonymous registration) or user
+ generated. Possible values include: "User", "System".
+ :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
+ :param description: The asset description text.
+ :type description: str
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'environment_specification_type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'environment_specification_type': {'key': 'properties.environmentSpecificationType', 'type': 'str'},
+ 'docker': {'key': 'properties.docker', 'type': 'DockerSpecification'},
+ 'conda_file': {'key': 'properties.condaFile', 'type': 'str'},
+ 'inference_container_properties': {'key': 'properties.inferenceContainerProperties', 'type': 'InferenceContainerProperties'},
+ 'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'tags': {'key': 'properties.tags', 'type': '{str}'},
+ 'properties': {'key': 'properties.properties', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ docker: Optional["DockerSpecification"] = None,
+ conda_file: Optional[str] = None,
+ inference_container_properties: Optional["InferenceContainerProperties"] = None,
+ generated_by: Optional[Union[str, "AssetGenerator"]] = None,
+ description: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(EnvironmentSpecificationVersionResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+ self.environment_specification_type = None
+ self.docker = docker
+ self.conda_file = conda_file
+ self.inference_container_properties = inference_container_properties
+ self.generated_by = generated_by
+ self.description = description
+ self.tags = tags
+ self.properties = properties
+
+
+class EnvironmentSpecificationVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of EnvironmentSpecificationVersion entities.
+
+ :param value: An array of objects of type EnvironmentSpecificationVersion.
+ :type value:
+ list[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[EnvironmentSpecificationVersionResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["EnvironmentSpecificationVersionResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentSpecificationVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """Error detail information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code: str,
+ message: str,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = code
+ self.message = message
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ retail_price: float,
+ os_type: Union[str, "VmPriceOsType"],
+ vm_tier: Union[str, "VmTier"],
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = retail_price
+ self.os_type = os_type
+ self.vm_tier = vm_tier
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three lettered code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ billing_currency: Union[str, "BillingCurrency"],
+ unit_of_measure: Union[str, "UnitOfMeasure"],
+ values: List["EstimatedVmPrice"],
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = billing_currency
+ self.unit_of_measure = unit_of_measure
+ self.values = values
+
+
+class EvaluationConfiguration(msrest.serialization.Model):
+ """EvaluationConfiguration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param primary_metric_name: Required.
+ :type primary_metric_name: str
+ :param primary_metric_goal: Required. Defines supported metric goals for hyperparameter tuning.
+ Possible values include: "Minimize", "Maximize".
+ :type primary_metric_goal: str or ~azure_machine_learning_workspaces.models.PrimaryMetricGoal
+ """
+
+ _validation = {
+ 'primary_metric_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'primary_metric_goal': {'required': True},
+ }
+
+ _attribute_map = {
+ 'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
+ 'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ primary_metric_name: str,
+ primary_metric_goal: Union[str, "PrimaryMetricGoal"],
+ **kwargs
+ ):
+ super(EvaluationConfiguration, self).__init__(**kwargs)
+ self.primary_metric_name = primary_metric_name
+ self.primary_metric_goal = primary_metric_goal
+
+
+class ExperimentLimits(msrest.serialization.Model):
+ """Limit settings on AutoML Experiment.
+
+ :param max_trials: Number of iterations.
+ :type max_trials: int
+ :param experiment_timeout_in_minutes: Experiment Timeout.
+ :type experiment_timeout_in_minutes: int
+ :param max_concurrent_trials: Maximum Concurrent iterations.
+ :type max_concurrent_trials: int
+ :param max_cores_per_trial: Max cores per iteration.
+ :type max_cores_per_trial: int
+ """
+
+ _attribute_map = {
+ 'max_trials': {'key': 'maxTrials', 'type': 'int'},
+ 'experiment_timeout_in_minutes': {'key': 'experimentTimeoutInMinutes', 'type': 'int'},
+ 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
+ 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_trials: Optional[int] = None,
+ experiment_timeout_in_minutes: Optional[int] = None,
+ max_concurrent_trials: Optional[int] = None,
+ max_cores_per_trial: Optional[int] = None,
+ **kwargs
+ ):
+ super(ExperimentLimits, self).__init__(**kwargs)
+ self.max_trials = max_trials
+ self.experiment_timeout_in_minutes = experiment_timeout_in_minutes
+ self.max_concurrent_trials = max_concurrent_trials
+ self.max_cores_per_trial = max_cores_per_trial
+
+
+class FeaturizationSettings(msrest.serialization.Model):
+ """Featurization Configuration.
+
+ :param featurization_config: Featurization config json string.
+ :type featurization_config: str
+ :param enable_dnn_featurization: Enable Dnn featurization.
+ :type enable_dnn_featurization: bool
+ """
+
+ _attribute_map = {
+ 'featurization_config': {'key': 'featurizationConfig', 'type': 'str'},
+ 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ featurization_config: Optional[str] = None,
+ enable_dnn_featurization: Optional[bool] = None,
+ **kwargs
+ ):
+ super(FeaturizationSettings, self).__init__(**kwargs)
+ self.featurization_config = featurization_config
+ self.enable_dnn_featurization = enable_dnn_featurization
+
+
+class ForecastingSettings(msrest.serialization.Model):
+ """Forecasting specific parameters.
+
+ :param forecasting_country_or_region: Country or region for holidays for forecasting tasks.
+ These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'.
+ :type forecasting_country_or_region: str
+ :param time_column_name: Time column name.
+ :type time_column_name: str
+ :param target_lags: Target Lags.
+ :type target_lags: list[int]
+ :param target_rolling_window_size: Forecasting Window Size.
+ :type target_rolling_window_size: int
+ :param forecast_horizon: Forecasting Horizon.
+ :type forecast_horizon: int
+ :param time_series_id_column_names: Time series column names.
+ :type time_series_id_column_names: list[str]
+ :param enable_dnn_training: Enable recommendation of DNN models.
+ :type enable_dnn_training: bool
+ """
+
+ _attribute_map = {
+ 'forecasting_country_or_region': {'key': 'forecastingCountryOrRegion', 'type': 'str'},
+ 'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
+ 'target_lags': {'key': 'targetLags', 'type': '[int]'},
+ 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'int'},
+ 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'int'},
+ 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
+ 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ forecasting_country_or_region: Optional[str] = None,
+ time_column_name: Optional[str] = None,
+ target_lags: Optional[List[int]] = None,
+ target_rolling_window_size: Optional[int] = None,
+ forecast_horizon: Optional[int] = None,
+ time_series_id_column_names: Optional[List[str]] = None,
+ enable_dnn_training: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ForecastingSettings, self).__init__(**kwargs)
+ self.forecasting_country_or_region = forecasting_country_or_region
+ self.time_column_name = time_column_name
+ self.target_lags = target_lags
+ self.target_rolling_window_size = target_rolling_window_size
+ self.forecast_horizon = forecast_horizon
+ self.time_series_id_column_names = time_series_id_column_names
+ self.enable_dnn_training = enable_dnn_training
+
+
+class GeneralSettings(msrest.serialization.Model):
+ """General Settings to submit an AutoML Job.
+
+ :param primary_metric: Primary optimization metric. Possible values include: "AUC_weighted",
+ "Accuracy", "Norm_macro_recall", "Average_precision_score_weighted",
+ "Precision_score_weighted", "Spearman_correlation", "Normalized_root_mean_squared_error",
+ "R2_score", "Normalized_mean_absolute_error", "Normalized_root_mean_squared_log_error".
+ :type primary_metric: str or ~azure_machine_learning_workspaces.models.OptimizationMetric
+ :param enable_model_explainability: Flag to turn on explainability on best model.
+ :type enable_model_explainability: bool
+ :param task_type: Type of AutoML Experiment [Classification, Regression, Forecasting]. Possible
+ values include: "Classification", "Regression", "Forecasting".
+ :type task_type: str or ~azure_machine_learning_workspaces.models.TaskType
+ """
+
+ _attribute_map = {
+ 'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
+ 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
+ 'task_type': {'key': 'taskType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ primary_metric: Optional[Union[str, "OptimizationMetric"]] = None,
+ enable_model_explainability: Optional[bool] = None,
+ task_type: Optional[Union[str, "TaskType"]] = None,
+ **kwargs
+ ):
+ super(GeneralSettings, self).__init__(**kwargs)
+ self.primary_metric = primary_metric
+ self.enable_model_explainability = enable_model_explainability
+ self.task_type = task_type
+
+
+class GlusterFsSection(msrest.serialization.Model):
+ """GlusterFsSection.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param server_address: Required. GlusterFS server address (can be the IP address or server
+ name).
+ :type server_address: str
+ :param volume_name: Required. GlusterFS volume name.
+ :type volume_name: str
+ """
+
+ _validation = {
+ 'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'server_address': {'key': 'serverAddress', 'type': 'str'},
+ 'volume_name': {'key': 'volumeName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server_address: str,
+ volume_name: str,
+ **kwargs
+ ):
+ super(GlusterFsSection, self).__init__(**kwargs)
+ self.server_address = server_address
+ self.volume_name = volume_name
+
+
+class HdInsight(Compute):
+ """A HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["HdInsightProperties"] = None,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = properties
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HdInsightProperties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+
+
+class IdAssetReference(AssetReferenceBase):
+ """IdAssetReference.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+ server. Possible values include: "Id", "DataPath", "OutputPath".
+ :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+ :param asset_id: Required.
+ :type asset_id: str
+ """
+
+ _validation = {
+ 'reference_type': {'required': True},
+ 'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ 'asset_id': {'key': 'assetId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ asset_id: str,
+ **kwargs
+ ):
+ super(IdAssetReference, self).__init__(**kwargs)
+ self.reference_type = 'Id' # type: str
+ self.asset_id = asset_id
+
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The user assigned identities associated with the resource.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Optional[Union[str, "ResourceIdentityType"]] = None,
+ user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
+
+class ImageAsset(msrest.serialization.Model):
+ """An Image asset.
+
+ :param id: The Asset Id.
+ :type id: str
+ :param mime_type: The mime type.
+ :type mime_type: str
+ :param url: The Url of the Asset.
+ :type url: str
+ :param unpack: Whether the Asset is unpacked.
+ :type unpack: bool
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'mime_type': {'key': 'mimeType', 'type': 'str'},
+ 'url': {'key': 'url', 'type': 'str'},
+ 'unpack': {'key': 'unpack', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ mime_type: Optional[str] = None,
+ url: Optional[str] = None,
+ unpack: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ImageAsset, self).__init__(**kwargs)
+ self.id = id
+ self.mime_type = mime_type
+ self.url = url
+ self.unpack = unpack
+
+
+class InferenceContainerProperties(msrest.serialization.Model):
+ """InferenceContainerProperties.
+
+ :param liveness_route: The route to check the liveness of the inference server container.
+ :type liveness_route: ~azure_machine_learning_workspaces.models.Route
+ :param readiness_route: The route to check the readiness of the inference server container.
+ :type readiness_route: ~azure_machine_learning_workspaces.models.Route
+ :param scoring_route: The port to send the scoring requests to, within the inference server
+ container.
+ :type scoring_route: ~azure_machine_learning_workspaces.models.Route
+ """
+
+ _attribute_map = {
+ 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'},
+ 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'},
+ 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'},
+ }
+
+ def __init__(
+ self,
+ *,
+ liveness_route: Optional["Route"] = None,
+ readiness_route: Optional["Route"] = None,
+ scoring_route: Optional["Route"] = None,
+ **kwargs
+ ):
+ super(InferenceContainerProperties, self).__init__(**kwargs)
+ self.liveness_route = liveness_route
+ self.readiness_route = readiness_route
+ self.scoring_route = scoring_route
+
+
+class InputData(msrest.serialization.Model):
+ """InputData.
+
+ :param dataset_id: Dataset registration id.
+ :type dataset_id: str
+ :param mode: Mode type, can be set for DatasetId. Possible values include: "Mount", "Download",
+ "Upload".
+ :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
+ :param value: Literal Value of a data binding. Example "42".
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'dataset_id': {'key': 'datasetId', 'type': 'str'},
+ 'mode': {'key': 'mode', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ dataset_id: Optional[str] = None,
+ mode: Optional[Union[str, "DataBindingMode"]] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(InputData, self).__init__(**kwargs)
+ self.dataset_id = dataset_id
+ self.mode = mode
+ self.value = value
+
+
+class JobBaseInteractionEndpoints(msrest.serialization.Model):
+ """Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+For local jobs, a job endpoint will have a value of FileStreamObject.
+
+ :param tracking:
+ :type tracking: str
+ :param studio:
+ :type studio: str
+ :param grafana:
+ :type grafana: str
+ :param tensorboard:
+ :type tensorboard: str
+ :param local:
+ :type local: str
+ """
+
+ _attribute_map = {
+ 'tracking': {'key': 'Tracking', 'type': 'str'},
+ 'studio': {'key': 'Studio', 'type': 'str'},
+ 'grafana': {'key': 'Grafana', 'type': 'str'},
+ 'tensorboard': {'key': 'Tensorboard', 'type': 'str'},
+ 'local': {'key': 'Local', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tracking: Optional[str] = None,
+ studio: Optional[str] = None,
+ grafana: Optional[str] = None,
+ tensorboard: Optional[str] = None,
+ local: Optional[str] = None,
+ **kwargs
+ ):
+ super(JobBaseInteractionEndpoints, self).__init__(**kwargs)
+ self.tracking = tracking
+ self.studio = studio
+ self.grafana = grafana
+ self.tensorboard = tensorboard
+ self.local = local
+
+
+class JobBaseResource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource URL of the entity (not URL encoded).
+ :vartype id: str
+ :ivar name: The name of the resource entity.
+ :vartype name: str
+ :ivar type: The resource provider and type.
+ :vartype type: str
+ :param properties: Required. Job base definition.
+ :type properties: ~azure_machine_learning_workspaces.models.JobBase
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'JobBase'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: "JobBase",
+ **kwargs
+ ):
+ super(JobBaseResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.properties = properties
+ self.system_data = None
+
+
+class JobBaseResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of JobBase entities.
+
+ :param value: An array of objects of type JobBase.
+ :type value: list[~azure_machine_learning_workspaces.models.JobBaseResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[JobBaseResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["JobBaseResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class JobOutput(msrest.serialization.Model):
+ """JobOutput.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar datastore_id: ARM ID of the datastore where the job logs and artifacts are stored, or
+ null for the default container ("azureml") in the workspace's storage account.
+ :vartype datastore_id: str
+ :ivar path: Path within the datastore to the job logs and artifacts.
+ :vartype path: str
+ """
+
+ _validation = {
+ 'datastore_id': {'readonly': True},
+ 'path': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'path': {'key': 'path', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(JobOutput, self).__init__(**kwargs)
+ self.datastore_id = None
+ self.path = None
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ key_vault_arm_id: str,
+ key_identifier: str,
+ identity_client_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = key_vault_arm_id
+ self.key_identifier = key_identifier
+ self.identity_client_id = identity_client_id
+
+
+class LabelCategory(msrest.serialization.Model):
+ """Label category definition.
+
+ :param display_name: Display name of the label category.
+ :type display_name: str
+ :param allow_multi_select: Indicates whether it is allowed to select multiple classes in this
+ category.
+ :type allow_multi_select: bool
+ :param classes: Dictionary of label classes in this category.
+ :type classes: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'},
+ 'classes': {'key': 'classes', 'type': '{LabelClass}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ allow_multi_select: Optional[bool] = None,
+ classes: Optional[Dict[str, "LabelClass"]] = None,
+ **kwargs
+ ):
+ super(LabelCategory, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.allow_multi_select = allow_multi_select
+ self.classes = classes
+
+
+class LabelClass(msrest.serialization.Model):
+ """Label class definition.
+
+ :param display_name: Display name of the label class.
+ :type display_name: str
+ :param subclasses: Dictionary of subclasses of the label class.
+ :type subclasses: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ subclasses: Optional[Dict[str, "LabelClass"]] = None,
+ **kwargs
+ ):
+ super(LabelClass, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.subclasses = subclasses
+
+
+class LabelingDatasetConfiguration(msrest.serialization.Model):
+    """Labeling dataset configuration definition.
+
+    :param asset_name: Name of the data asset to perform labeling.
+    :type asset_name: str
+    :param incremental_dataset_refresh_enabled: Indicates whether to enable incremental dataset
+     refresh.
+    :type incremental_dataset_refresh_enabled: bool
+    :param dataset_version: AML dataset version.
+    :type dataset_version: str
+    """
+
+    # Attribute name -> REST JSON key and msrest serialization type.
+    _attribute_map = {
+        'asset_name': {'key': 'assetName', 'type': 'str'},
+        'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'},
+        'dataset_version': {'key': 'datasetVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        asset_name: Optional[str] = None,
+        incremental_dataset_refresh_enabled: Optional[bool] = None,
+        dataset_version: Optional[str] = None,
+        **kwargs
+    ):
+        super(LabelingDatasetConfiguration, self).__init__(**kwargs)
+        self.asset_name = asset_name
+        self.incremental_dataset_refresh_enabled = incremental_dataset_refresh_enabled
+        self.dataset_version = dataset_version
+
+
+class LabelingJob(JobBase):
+    """Labeling job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param job_type: Required. Specifies the type of job.Constant filled by server.  Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param label_categories: Label categories of the job.
+    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
+    :param job_instructions: Labeling instructions of the job.
+    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
+    :param dataset_configuration: Configuration of dataset used in the job.
+    :type dataset_configuration:
+     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
+    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
+    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
+    :param labeling_job_media_properties: Properties of a labeling job.
+    :type labeling_job_media_properties:
+     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
+    :ivar project_id: Internal id of the job(Previously called project).
+    :vartype project_id: str
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :ivar progress_metrics: Progress metrics of the job.
+    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
+    :ivar status_messages: Status messages of the job.
+    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
+    """
+
+    # 'readonly' entries are server-populated (see class docstring); 'required' must be set
+    # before sending to the service.
+    _validation = {
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'project_id': {'readonly': True},
+        'status': {'readonly': True},
+        'progress_metrics': {'readonly': True},
+        'status_messages': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'},
+        'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'},
+        'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
+        'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
+        'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
+        'project_id': {'key': 'projectId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'},
+        'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        label_categories: Optional[Dict[str, "LabelCategory"]] = None,
+        job_instructions: Optional["LabelingJobInstructions"] = None,
+        dataset_configuration: Optional["LabelingDatasetConfiguration"] = None,
+        ml_assist_configuration: Optional["MlAssistConfiguration"] = None,
+        labeling_job_media_properties: Optional["LabelingJobMediaProperties"] = None,
+        **kwargs
+    ):
+        super(LabelingJob, self).__init__(description=description, tags=tags, properties=properties, **kwargs)
+        # Polymorphic discriminator: this subtype always serializes jobType as 'Labeling'.
+        self.job_type = 'Labeling'  # type: str
+        self.label_categories = label_categories
+        self.job_instructions = job_instructions
+        self.dataset_configuration = dataset_configuration
+        self.ml_assist_configuration = ml_assist_configuration
+        self.labeling_job_media_properties = labeling_job_media_properties
+        # Read-only fields: left as None client-side, filled in from service responses.
+        self.project_id = None
+        self.status = None
+        self.progress_metrics = None
+        self.status_messages = None
+
+
+class LabelingJobMediaProperties(msrest.serialization.Model):
+    """Properties of a labeling job.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+    }
+
+    # Polymorphic base: msrest uses the 'mediaType' discriminator value to pick the
+    # concrete subclass when deserializing.
+    _subtype_map = {
+        'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobMediaProperties, self).__init__(**kwargs)
+        self.media_type = None  # type: Optional[str]
+
+
+class LabelingJobImageProperties(LabelingJobMediaProperties):
+    """Properties of a labeling job for image data.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    :param annotation_type: Annotation type of image labeling job. Possible values include:
+     "Classification", "BoundingBox", "InstanceSegmentation".
+    :type annotation_type: str or ~azure_machine_learning_workspaces.models.ImageAnnotationType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+        'annotation_type': {'key': 'annotationType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        annotation_type: Optional[Union[str, "ImageAnnotationType"]] = None,
+        **kwargs
+    ):
+        super(LabelingJobImageProperties, self).__init__(**kwargs)
+        # Discriminator constant for this subtype (see LabelingJobMediaProperties._subtype_map).
+        self.media_type = 'Image'  # type: str
+        self.annotation_type = annotation_type
+
+
+class LabelingJobInstructions(msrest.serialization.Model):
+    """Instructions for labeling job.
+
+    :param uri: The link to a page with detailed labeling instructions for labelers.
+    :type uri: str
+    """
+
+    # Single-field model: serialized as {"uri": "..."}.
+    _attribute_map = {
+        'uri': {'key': 'uri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        uri: Optional[str] = None,
+        **kwargs
+    ):
+        super(LabelingJobInstructions, self).__init__(**kwargs)
+        self.uri = uri
+
+
+class LabelingJobResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param job_type: Required. Specifies the type of job.Constant filled by server.  Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param label_categories: Label categories of the job.
+    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
+    :param job_instructions: Labeling instructions of the job.
+    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
+    :param dataset_configuration: Configuration of dataset used in the job.
+    :type dataset_configuration:
+     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
+    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
+    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
+    :param labeling_job_media_properties: Properties of a labeling job.
+    :type labeling_job_media_properties:
+     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
+    :ivar project_id: Internal id of the job(Previously called project).
+    :vartype project_id: str
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :ivar progress_metrics: Progress metrics of the job.
+    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
+    :ivar status_messages: Status messages of the job.
+    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'project_id': {'readonly': True},
+        'status': {'readonly': True},
+        'progress_metrics': {'readonly': True},
+        'status_messages': {'readonly': True},
+    }
+
+    # Dotted keys ('properties.xxx') flatten the nested ARM 'properties' object onto
+    # top-level attributes of this envelope model.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'job_type': {'key': 'properties.jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'properties.interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+        'label_categories': {'key': 'properties.labelCategories', 'type': '{LabelCategory}'},
+        'job_instructions': {'key': 'properties.jobInstructions', 'type': 'LabelingJobInstructions'},
+        'dataset_configuration': {'key': 'properties.datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
+        'ml_assist_configuration': {'key': 'properties.mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
+        'labeling_job_media_properties': {'key': 'properties.labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
+        'project_id': {'key': 'properties.projectId', 'type': 'str'},
+        'status': {'key': 'properties.status', 'type': 'str'},
+        'progress_metrics': {'key': 'properties.progressMetrics', 'type': 'ProgressMetrics'},
+        'status_messages': {'key': 'properties.statusMessages', 'type': '[StatusMessage]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        label_categories: Optional[Dict[str, "LabelCategory"]] = None,
+        job_instructions: Optional["LabelingJobInstructions"] = None,
+        dataset_configuration: Optional["LabelingDatasetConfiguration"] = None,
+        ml_assist_configuration: Optional["MlAssistConfiguration"] = None,
+        labeling_job_media_properties: Optional["LabelingJobMediaProperties"] = None,
+        **kwargs
+    ):
+        super(LabelingJobResource, self).__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        # NOTE(review): unlike LabelingJob, this envelope does not pin job_type to
+        # 'Labeling'; the generator leaves it None to be filled by the server.
+        self.job_type = None  # type: Optional[str]
+        self.provisioning_state = None
+        self.interaction_endpoints = None
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+        self.label_categories = label_categories
+        self.job_instructions = job_instructions
+        self.dataset_configuration = dataset_configuration
+        self.ml_assist_configuration = ml_assist_configuration
+        self.labeling_job_media_properties = labeling_job_media_properties
+        self.project_id = None
+        self.status = None
+        self.progress_metrics = None
+        self.status_messages = None
+
+
+class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of LabelingJob entities.
+
+    :param value: An array of objects of type LabelingJob.
+    :type value: list[~azure_machine_learning_workspaces.models.LabelingJobResource]
+    :param next_link:
+    :type next_link: str
+    """
+
+    # Standard ARM page shape: 'value' holds the page, 'nextLink' the continuation URL.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[LabelingJobResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["LabelingJobResource"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class LabelingJobTextProperties(LabelingJobMediaProperties):
+    """Properties of a labeling job for text data.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server.  Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    :param annotation_type: Annotation type of text labeling job. Possible values include:
+     "Classification".
+    :type annotation_type: str or ~azure_machine_learning_workspaces.models.TextAnnotationType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+        'annotation_type': {'key': 'annotationType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        annotation_type: Optional[Union[str, "TextAnnotationType"]] = None,
+        **kwargs
+    ):
+        super(LabelingJobTextProperties, self).__init__(**kwargs)
+        # Discriminator constant for this subtype (see LabelingJobMediaProperties._subtype_map).
+        self.media_type = 'Text'  # type: str
+        self.annotation_type = annotation_type
+
+
+class LinkedInfo(msrest.serialization.Model):
+    """LinkedInfo.
+
+    :param linked_id: Linked service ID.
+    :type linked_id: str
+    :param linked_resource_name: Linked service resource name.
+    :type linked_resource_name: str
+    :param origin: Type of the linked service. Possible values include: "Synapse".
+    :type origin: str or ~azure_machine_learning_workspaces.models.OriginType
+    """
+
+    # Attribute name -> REST JSON key and msrest serialization type.
+    _attribute_map = {
+        'linked_id': {'key': 'linkedId', 'type': 'str'},
+        'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        linked_id: Optional[str] = None,
+        linked_resource_name: Optional[str] = None,
+        origin: Optional[Union[str, "OriginType"]] = None,
+        **kwargs
+    ):
+        super(LinkedInfo, self).__init__(**kwargs)
+        self.linked_id = linked_id
+        self.linked_resource_name = linked_resource_name
+        self.origin = origin
+
+
+class LinkedServiceList(msrest.serialization.Model):
+    """List response of linked service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: Array of linked service.
+    :vartype value: list[~azure_machine_learning_workspaces.models.LinkedServiceResponse]
+    """
+
+    # 'value' is read-only: populated from the service response only.
+    _validation = {
+        'value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[LinkedServiceResponse]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceList, self).__init__(**kwargs)
+        self.value = None
+
+
+class LinkedServiceProps(msrest.serialization.Model):
+    """LinkedService specific properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param linked_service_resource_id: Required. ResourceId of the link target of the linked
+     service.
+    :type linked_service_resource_id: str
+    :ivar link_type: Type of the link target. Default value: "Synapse".
+    :vartype link_type: str
+    :param created_time: The creation time of the linked service.
+    :type created_time: ~datetime.datetime
+    :param modified_time: The last modified time of the linked service.
+    :type modified_time: ~datetime.datetime
+    """
+
+    _validation = {
+        'linked_service_resource_id': {'required': True},
+        'link_type': {'constant': True},
+    }
+
+    _attribute_map = {
+        'linked_service_resource_id': {'key': 'linkedServiceResourceId', 'type': 'str'},
+        'link_type': {'key': 'linkType', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+    }
+
+    # Constant class attribute ({'constant': True} above): always serialized as "Synapse"
+    # and not settable through __init__.
+    link_type = "Synapse"
+
+    def __init__(
+        self,
+        *,
+        linked_service_resource_id: str,
+        created_time: Optional[datetime.datetime] = None,
+        modified_time: Optional[datetime.datetime] = None,
+        **kwargs
+    ):
+        super(LinkedServiceProps, self).__init__(**kwargs)
+        self.linked_service_resource_id = linked_service_resource_id
+        self.created_time = created_time
+        self.modified_time = modified_time
+
+
+class LinkedServiceRequest(msrest.serialization.Model):
+    """object used for creating linked service.
+
+    :param name: Friendly name of the linked service.
+    :type name: str
+    :param location: location of the linked service.
+    :type location: str
+    :param identity: Identity for the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param properties: LinkedService specific properties.
+    :type properties: ~azure_machine_learning_workspaces.models.LinkedServiceProps
+    """
+
+    # Attribute name -> REST JSON key and msrest serialization type.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'properties': {'key': 'properties', 'type': 'LinkedServiceProps'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        location: Optional[str] = None,
+        identity: Optional["Identity"] = None,
+        properties: Optional["LinkedServiceProps"] = None,
+        **kwargs
+    ):
+        super(LinkedServiceRequest, self).__init__(**kwargs)
+        self.name = name
+        self.location = location
+        self.identity = identity
+        self.properties = properties
+
+
+class LinkedServiceResponse(msrest.serialization.Model):
+    """Linked service.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: ResourceId of the link of the linked service.
+    :vartype id: str
+    :ivar name: Friendly name of the linked service.
+    :vartype name: str
+    :ivar type: Resource type of linked service.
+    :vartype type: str
+    :param location: location of the linked service.
+    :type location: str
+    :param identity: Identity for the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param properties: LinkedService specific properties.
+    :type properties: ~azure_machine_learning_workspaces.models.LinkedServiceProps
+    """
+
+    # id/name/type are server-populated ARM envelope fields.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'properties': {'key': 'properties', 'type': 'LinkedServiceProps'},
+    }
+
+    def __init__(
+        self,
+        *,
+        location: Optional[str] = None,
+        identity: Optional["Identity"] = None,
+        properties: Optional["LinkedServiceProps"] = None,
+        **kwargs
+    ):
+        super(LinkedServiceResponse, self).__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+        self.location = location
+        self.identity = identity
+        self.properties = properties
+
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+    """The List Aml user feature operation response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of AML user facing features.
+    :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+    :ivar next_link: The URI to fetch the next page of AML user features information. Call
+     ListNext() with this to fetch the next page of AML user features information.
+    :vartype next_link: str
+    """
+
+    # Entire page shape is read-only: populated from the service response.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+    """ListNotebookKeysResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar primary_access_key:
+    :vartype primary_access_key: str
+    :ivar secondary_access_key:
+    :vartype secondary_access_key: str
+    """
+
+    # Both keys are read-only secrets returned by the listKeys-style operation.
+    _validation = {
+        'primary_access_key': {'readonly': True},
+        'secondary_access_key': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+        'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListNotebookKeysResult, self).__init__(**kwargs)
+        self.primary_access_key = None
+        self.secondary_access_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+    """The List Usages operation response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of AML resource usages.
+    :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+    :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+     ListNext() with this to fetch the next page of AML resource usage information.
+    :vartype next_link: str
+    """
+
+    # Entire page shape is read-only: populated from the service response.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Usage]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListUsagesResult, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+    """ListWorkspaceKeysResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar user_storage_key:
+    :vartype user_storage_key: str
+    :ivar user_storage_resource_id:
+    :vartype user_storage_resource_id: str
+    :ivar app_insights_instrumentation_key:
+    :vartype app_insights_instrumentation_key: str
+    :ivar container_registry_credentials:
+    :vartype container_registry_credentials:
+     ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+    """
+
+    # All fields are read-only secrets/credentials returned by the listKeys operation.
+    _validation = {
+        'user_storage_key': {'readonly': True},
+        'user_storage_resource_id': {'readonly': True},
+        'app_insights_instrumentation_key': {'readonly': True},
+        'container_registry_credentials': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+        'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+        'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+        'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+        self.user_storage_key = None
+        self.user_storage_resource_id = None
+        self.app_insights_instrumentation_key = None
+        self.container_registry_credentials = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+    """The List WorkspaceQuotasByVMFamily operation response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of Workspace Quotas by VM Family.
+    :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+    :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+     Call ListNext() with this to fetch the next page of Workspace Quota information.
+    :vartype next_link: str
+    """
+
+    # Entire page shape is read-only: populated from the service response.
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ResourceQuota]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ListWorkspaceQuotas, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+    """Wrapper for error response to follow ARM guidelines.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar error: The error response.
+    :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+    """
+
+    # 'error' is read-only: only ever deserialized from an error payload.
+    _validation = {
+        'error': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MachineLearningServiceError, self).__init__(**kwargs)
+        self.error = None
+
+
+class ManagedComputeConfiguration(ComputeConfiguration):
+    """ManagedComputeConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server.  Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ManagedComputeConfiguration, self).__init__(**kwargs)
+        # Discriminator constant: this subtype always serializes computeType as 'Managed'.
+        self.compute_type = 'Managed'  # type: str
+
+
+class ManagedDeploymentConfiguration(DeploymentConfigurationBase):
+    """ManagedDeploymentConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. Constant filled by server.  Possible values include: "Managed",
+     "AKS", "AzureMLCompute".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param app_insights_enabled:
+    :type app_insights_enabled: bool
+    :param max_concurrent_requests_per_instance:
+    :type max_concurrent_requests_per_instance: int
+    :param max_queue_wait_ms:
+    :type max_queue_wait_ms: int
+    :param scoring_timeout_ms:
+    :type scoring_timeout_ms: int
+    :param liveness_probe_requirements: The liveness probe requirements.
+    :type liveness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    :param instance_type:
+    :type instance_type: str
+    :param os_type: Possible values include: "Linux", "Windows".
+    :type os_type: str or ~azure_machine_learning_workspaces.models.OsTypes
+    :param readiness_probe_requirements: The liveness probe requirements.
+    :type readiness_probe_requirements:
+     ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+        'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+        'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+        'instance_type': {'key': 'instanceType', 'type': 'str'},
+        'os_type': {'key': 'osType', 'type': 'str'},
+        'readiness_probe_requirements': {'key': 'readinessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        max_concurrent_requests_per_instance: Optional[int] = None,
+        max_queue_wait_ms: Optional[int] = None,
+        scoring_timeout_ms: Optional[int] = None,
+        liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        instance_type: Optional[str] = None,
+        os_type: Optional[Union[str, "OsTypes"]] = None,
+        readiness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+        **kwargs
+    ):
+        # Shared deployment settings are handled by the base; only the Managed-specific
+        # fields (instance_type, os_type, readiness probe) are stored here.
+        super(ManagedDeploymentConfiguration, self).__init__(app_insights_enabled=app_insights_enabled, max_concurrent_requests_per_instance=max_concurrent_requests_per_instance, max_queue_wait_ms=max_queue_wait_ms, scoring_timeout_ms=scoring_timeout_ms, liveness_probe_requirements=liveness_probe_requirements, **kwargs)
+        # Discriminator constant: this subtype always serializes computeType as 'Managed'.
+        self.compute_type = 'Managed'  # type: str
+        self.instance_type = instance_type
+        self.os_type = os_type
+        self.readiness_probe_requirements = readiness_probe_requirements
+
+
+class ManagedIdentityConfiguration(IdentityConfiguration):
+    """ManagedIdentityConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+     server.  Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+    :param client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not
+     set this field.
+    :type client_id: str
+    :param object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not
+     set this field.
+    :type object_id: str
+    :param msi_resource_id: Specifies a user-assigned identity by resource ID. For system-assigned,
+     do not set this field.
+    :type msi_resource_id: str
+    """
+
+    _validation = {
+        'identity_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'identity_type': {'key': 'identityType', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'object_id': {'key': 'objectId', 'type': 'str'},
+        'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        client_id: Optional[str] = None,
+        object_id: Optional[str] = None,
+        msi_resource_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(ManagedIdentityConfiguration, self).__init__(**kwargs)
+        # Discriminator constant: this subtype always serializes identityType as 'Managed'.
+        self.identity_type = 'Managed'  # type: str
+        self.client_id = client_id
+        self.object_id = object_id
+        self.msi_resource_id = msi_resource_id
+
+
+class MedianStoppingPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+ """Defines an early termination policy based on running averages of the primary metric of all runs.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param evaluation_interval:
+ :type evaluation_interval: int
+ :param delay_evaluation:
+ :type delay_evaluation: int
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ evaluation_interval: Optional[int] = None,
+ delay_evaluation: Optional[int] = None,
+ **kwargs
+ ):
+ super(MedianStoppingPolicyConfiguration, self).__init__(evaluation_interval=evaluation_interval, delay_evaluation=delay_evaluation, **kwargs)
+ self.policy_type = 'MedianStopping' # type: str
+
+
+class MlAssistConfiguration(msrest.serialization.Model):
+ """Labeling MLAssist configuration definition.
+
+ :param inferencing_compute_binding: AML compute binding used in inferencing.
+ :type inferencing_compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param training_compute_binding: AML compute binding used in training.
+ :type training_compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+ :param ml_assist_enabled: Indicates whether MLAssist feature is enabled.
+ :type ml_assist_enabled: bool
+ """
+
+ _attribute_map = {
+ 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeBinding'},
+ 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeBinding'},
+ 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ inferencing_compute_binding: Optional["ComputeBinding"] = None,
+ training_compute_binding: Optional["ComputeBinding"] = None,
+ ml_assist_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(MlAssistConfiguration, self).__init__(**kwargs)
+ self.inferencing_compute_binding = inferencing_compute_binding
+ self.training_compute_binding = training_compute_binding
+ self.ml_assist_enabled = ml_assist_enabled
+
+
+class Model(msrest.serialization.Model):
+    """An Azure Machine Learning Model.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: The Model Id.
+    :type id: str
+    :param name: Required. The Model name.
+    :type name: str
+    :param framework: The Model framework.
+    :type framework: str
+    :param framework_version: The Model framework version.
+    :type framework_version: str
+    :param version: The Model version assigned by Model Management Service.
+    :type version: long
+    :param datasets: The list of datasets associated with the model.
+    :type datasets: list[~azure_machine_learning_workspaces.models.DatasetReference]
+    :param url: Required. The URL of the Model. Usually a SAS URL.
+    :type url: str
+    :param mime_type: Required. The MIME type of Model content. For more details about MIME type,
+     please open https://www.iana.org/assignments/media-types/media-types.xhtml.
+    :type mime_type: str
+    :param description: The Model description text.
+    :type description: str
+    :param created_time: The Model creation time (UTC).
+    :type created_time: ~datetime.datetime
+    :param modified_time: The Model last modified time (UTC).
+    :type modified_time: ~datetime.datetime
+    :param unpack: Indicates whether we need to unpack the Model during docker Image creation.
+    :type unpack: bool
+    :param parent_model_id: The Parent Model Id.
+    :type parent_model_id: str
+    :param run_id: The RunId that created this model.
+    :type run_id: str
+    :param experiment_name: The name of the experiment where this model was created.
+    :type experiment_name: str
+    :param kv_tags: The Model tag dictionary. Items are mutable.
+    :type kv_tags: dict[str, str]
+    :param properties: The Model property dictionary. Properties are immutable.
+    :type properties: dict[str, str]
+    :param derived_model_ids: Models derived from this model.
+    :type derived_model_ids: list[str]
+    :param sample_input_data: Sample Input Data for the Model. A reference to a dataset in the
+     workspace in the format aml://dataset/{datasetId}.
+    :type sample_input_data: str
+    :param sample_output_data: Sample Output Data for the Model. A reference to a dataset in the
+     workspace in the format aml://dataset/{datasetId}.
+    :type sample_output_data: str
+    :param resource_requirements: Resource requirements for the model.
+    :type resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    """
+
+    # Fields the caller must populate before the model is sent to Azure.
+    _validation = {
+        'name': {'required': True},
+        'url': {'required': True},
+        'mime_type': {'required': True},
+    }
+
+    # Maps each Python attribute to its REST wire name and msrest serialization type.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'framework': {'key': 'framework', 'type': 'str'},
+        'framework_version': {'key': 'frameworkVersion', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'long'},
+        'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+        'url': {'key': 'url', 'type': 'str'},
+        'mime_type': {'key': 'mimeType', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+        'unpack': {'key': 'unpack', 'type': 'bool'},
+        'parent_model_id': {'key': 'parentModelId', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'derived_model_ids': {'key': 'derivedModelIds', 'type': '[str]'},
+        'sample_input_data': {'key': 'sampleInputData', 'type': 'str'},
+        'sample_output_data': {'key': 'sampleOutputData', 'type': 'str'},
+        'resource_requirements': {'key': 'resourceRequirements', 'type': 'ContainerResourceRequirements'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        url: str,
+        mime_type: str,
+        id: Optional[str] = None,
+        framework: Optional[str] = None,
+        framework_version: Optional[str] = None,
+        version: Optional[int] = None,
+        datasets: Optional[List["DatasetReference"]] = None,
+        description: Optional[str] = None,
+        created_time: Optional[datetime.datetime] = None,
+        modified_time: Optional[datetime.datetime] = None,
+        unpack: Optional[bool] = None,
+        parent_model_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+        experiment_name: Optional[str] = None,
+        kv_tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        derived_model_ids: Optional[List[str]] = None,
+        sample_input_data: Optional[str] = None,
+        sample_output_data: Optional[str] = None,
+        resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        **kwargs
+    ):
+        # Straight field assignment; validation of required fields happens at serialization time.
+        super(Model, self).__init__(**kwargs)
+        self.id = id
+        self.name = name
+        self.framework = framework
+        self.framework_version = framework_version
+        self.version = version
+        self.datasets = datasets
+        self.url = url
+        self.mime_type = mime_type
+        self.description = description
+        self.created_time = created_time
+        self.modified_time = modified_time
+        self.unpack = unpack
+        self.parent_model_id = parent_model_id
+        self.run_id = run_id
+        self.experiment_name = experiment_name
+        self.kv_tags = kv_tags
+        self.properties = properties
+        self.derived_model_ids = derived_model_ids
+        self.sample_input_data = sample_input_data
+        self.sample_output_data = sample_output_data
+        self.resource_requirements = resource_requirements
+
+
+class ModelContainerResource(msrest.serialization.Model):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :ivar latest_versions: Latest model versions for each stage. Key is the model stage, value is
+     the model version ARM ID.
+    :vartype latest_versions: dict[str, str]
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    # Read-only fields are populated by the server and ignored on requests.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'latest_versions': {'readonly': True},
+    }
+
+    # 'properties.*' keys are flattened by msrest into the nested ARM 'properties' object.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'latest_versions': {'key': 'properties.latestVersions', 'type': '{str}'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'tags': {'key': 'properties.tags', 'type': '{str}'},
+        'properties': {'key': 'properties.properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ModelContainerResource, self).__init__(**kwargs)
+        # Server-populated fields start as None and are filled in during deserialization.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.system_data = None
+        self.latest_versions = None
+        self.description = description
+        self.tags = tags
+        self.properties = properties
+
+
+class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelContainer entities.
+
+ :param value: An array of objects of type ModelContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelContainerResource]
+ :param next_link:
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ModelContainerResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ModelContainerResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class ModelDockerSection(msrest.serialization.Model):
+ """ModelDockerSection.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistry"] = None,
+ **kwargs
+ ):
+ super(ModelDockerSection, self).__init__(**kwargs)
+ self.base_image = base_image
+ self.base_dockerfile = base_dockerfile
+ self.base_image_registry = base_image_registry
+
+
+class ModelDockerSectionBaseImageRegistry(ContainerRegistry):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionBaseImageRegistry, self).__init__(address=address, username=username, password=password, **kwargs)
+
+
+class ModelDockerSectionResponse(msrest.serialization.Model):
+ """ModelDockerSectionResponse.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistryResponse"] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponse, self).__init__(**kwargs)
+ self.base_image = base_image
+ self.base_dockerfile = base_dockerfile
+ self.base_image_registry = base_image_registry
+
+
+class ModelDockerSectionResponseBaseImageRegistry(ContainerRegistryResponse):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponseBaseImageRegistry, self).__init__(address=address, **kwargs)
+
+
+class ModelEnvironmentDefinitionDocker(ModelDockerSection):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistry"] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionDocker, self).__init__(base_image=base_image, base_dockerfile=base_dockerfile, base_image_registry=base_image_registry, **kwargs)
+
+
+class ModelPythonSection(msrest.serialization.Model):
+ """ModelPythonSection.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelPythonSection, self).__init__(**kwargs)
+ self.interpreter_path = interpreter_path
+ self.user_managed_dependencies = user_managed_dependencies
+ self.conda_dependencies = conda_dependencies
+ self.base_conda_environment = base_conda_environment
+
+
+class ModelEnvironmentDefinitionPython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionPython, self).__init__(interpreter_path=interpreter_path, user_managed_dependencies=user_managed_dependencies, conda_dependencies=conda_dependencies, base_conda_environment=base_conda_environment, **kwargs)
+
+
+class RSection(msrest.serialization.Model):
+ """RSection.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+ :param custom_url_packages: The packages from custom urls.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackage"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(RSection, self).__init__(**kwargs)
+ self.r_version = r_version
+ self.user_managed = user_managed
+ self.rscript_path = rscript_path
+ self.snapshot_date = snapshot_date
+ self.cran_packages = cran_packages
+ self.git_hub_packages = git_hub_packages
+ self.custom_url_packages = custom_url_packages
+ self.bio_conductor_packages = bio_conductor_packages
+
+
+class ModelEnvironmentDefinitionR(RSection):
+    """Settings for a R environment.
+
+    :param r_version: The version of R to be installed.
+    :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+    :type user_managed: bool
+    :param rscript_path: The Rscript path to use if an environment build is not required.
+     The path specified gets used to call the user script.
+    :type rscript_path: str
+    :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+    :type snapshot_date: str
+    :param cran_packages: The CRAN packages to use.
+    :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+    :param git_hub_packages: The packages directly from GitHub.
+    :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+    :param custom_url_packages: The packages from custom urls.
+    :type custom_url_packages: list[str]
+    :param bio_conductor_packages: The packages from Bioconductor.
+    :type bio_conductor_packages: list[str]
+    """
+
+    # Identical to RSection's map; repeated here by the code generator for this named subtype.
+    _attribute_map = {
+        'r_version': {'key': 'rVersion', 'type': 'str'},
+        'user_managed': {'key': 'userManaged', 'type': 'bool'},
+        'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+        'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+        'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+        'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+        'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+        'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        r_version: Optional[str] = None,
+        user_managed: Optional[bool] = None,
+        rscript_path: Optional[str] = None,
+        snapshot_date: Optional[str] = None,
+        cran_packages: Optional[List["RCranPackage"]] = None,
+        git_hub_packages: Optional[List["RGitHubPackage"]] = None,
+        custom_url_packages: Optional[List[str]] = None,
+        bio_conductor_packages: Optional[List[str]] = None,
+        **kwargs
+    ):
+        # Pure pass-through: all fields are declared and stored on the RSection base class.
+        super(ModelEnvironmentDefinitionR, self).__init__(r_version=r_version, user_managed=user_managed, rscript_path=rscript_path, snapshot_date=snapshot_date, cran_packages=cran_packages, git_hub_packages=git_hub_packages, custom_url_packages=custom_url_packages, bio_conductor_packages=bio_conductor_packages, **kwargs)
+
+
+class ModelEnvironmentDefinitionResponseDocker(ModelDockerSectionResponse):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistryResponse"] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseDocker, self).__init__(base_image=base_image, base_dockerfile=base_dockerfile, base_image_registry=base_image_registry, **kwargs)
+
+
+class ModelEnvironmentDefinitionResponsePython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponsePython, self).__init__(interpreter_path=interpreter_path, user_managed_dependencies=user_managed_dependencies, conda_dependencies=conda_dependencies, base_conda_environment=base_conda_environment, **kwargs)
+
+
+class RSectionResponse(msrest.serialization.Model):
+ """RSectionResponse.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+ :param custom_url_packages: The packages from custom urls.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackageResponse"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(RSectionResponse, self).__init__(**kwargs)
+ self.r_version = r_version
+ self.user_managed = user_managed
+ self.rscript_path = rscript_path
+ self.snapshot_date = snapshot_date
+ self.cran_packages = cran_packages
+ self.git_hub_packages = git_hub_packages
+ self.custom_url_packages = custom_url_packages
+ self.bio_conductor_packages = bio_conductor_packages
+
+
+class ModelEnvironmentDefinitionResponseR(RSectionResponse):
+    """Settings for a R environment.
+
+    :param r_version: The version of R to be installed.
+    :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+    :type user_managed: bool
+    :param rscript_path: The Rscript path to use if an environment build is not required.
+     The path specified gets used to call the user script.
+    :type rscript_path: str
+    :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+    :type snapshot_date: str
+    :param cran_packages: The CRAN packages to use.
+    :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+    :param git_hub_packages: The packages directly from GitHub.
+    :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom urls.
+    :type custom_url_packages: list[str]
+    :param bio_conductor_packages: The packages from Bioconductor.
+    :type bio_conductor_packages: list[str]
+    """
+
+    # Identical to RSectionResponse's map; repeated here by the code generator for this subtype.
+    _attribute_map = {
+        'r_version': {'key': 'rVersion', 'type': 'str'},
+        'user_managed': {'key': 'userManaged', 'type': 'bool'},
+        'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+        'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+        'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+        'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+        'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+        'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        r_version: Optional[str] = None,
+        user_managed: Optional[bool] = None,
+        rscript_path: Optional[str] = None,
+        snapshot_date: Optional[str] = None,
+        cran_packages: Optional[List["RCranPackage"]] = None,
+        git_hub_packages: Optional[List["RGitHubPackageResponse"]] = None,
+        custom_url_packages: Optional[List[str]] = None,
+        bio_conductor_packages: Optional[List[str]] = None,
+        **kwargs
+    ):
+        # Pure pass-through: all fields are declared and stored on the RSectionResponse base class.
+        super(ModelEnvironmentDefinitionResponseR, self).__init__(r_version=r_version, user_managed=user_managed, rscript_path=rscript_path, snapshot_date=snapshot_date, cran_packages=cran_packages, git_hub_packages=git_hub_packages, custom_url_packages=custom_url_packages, bio_conductor_packages=bio_conductor_packages, **kwargs)
+
+
class ModelSparkSection(msrest.serialization.Model):
    """ModelSparkSection.

    :param repositories: The list of spark repositories.
    :type repositories: list[str]
    :param packages: The Spark packages to use.
    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
    :param precache_packages: Whether to precache the packages.
    :type precache_packages: bool
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings
    # used for serialization/deserialization.
    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[str]'},
        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        repositories: Optional[List[str]] = None,
        packages: Optional[List["SparkMavenPackage"]] = None,
        precache_packages: Optional[bool] = None,
        **kwargs
    ):
        super(ModelSparkSection, self).__init__(**kwargs)
        self.repositories = repositories
        self.packages = packages
        self.precache_packages = precache_packages
+
+
class ModelEnvironmentDefinitionResponseSpark(ModelSparkSection):
    """The configuration for a Spark environment.

    :param repositories: The list of spark repositories.
    :type repositories: list[str]
    :param packages: The Spark packages to use.
    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
    :param precache_packages: Whether to precache the packages.
    :type precache_packages: bool
    """

    # NOTE: identical to ModelSparkSection._attribute_map; restated by the
    # code generator for this subtype.
    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[str]'},
        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        repositories: Optional[List[str]] = None,
        packages: Optional[List["SparkMavenPackage"]] = None,
        precache_packages: Optional[bool] = None,
        **kwargs
    ):
        # All state is stored by the ModelSparkSection base initializer.
        super(ModelEnvironmentDefinitionResponseSpark, self).__init__(repositories=repositories, packages=packages, precache_packages=precache_packages, **kwargs)
+
+
class ModelEnvironmentDefinitionSpark(ModelSparkSection):
    """The configuration for a Spark environment.

    :param repositories: The list of spark repositories.
    :type repositories: list[str]
    :param packages: The Spark packages to use.
    :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
    :param precache_packages: Whether to precache the packages.
    :type precache_packages: bool
    """

    # NOTE: identical to ModelSparkSection._attribute_map; restated by the
    # code generator for this subtype.
    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[str]'},
        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        repositories: Optional[List[str]] = None,
        packages: Optional[List["SparkMavenPackage"]] = None,
        precache_packages: Optional[bool] = None,
        **kwargs
    ):
        # All state is stored by the ModelSparkSection base initializer.
        super(ModelEnvironmentDefinitionSpark, self).__init__(repositories=repositories, packages=packages, precache_packages=precache_packages, **kwargs)
+
+
class ModelVersionResource(msrest.serialization.Model):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :param stage: Model asset stage.
    :type stage: str
    :param flavors: Dictionary mapping model flavors to their properties.
    :type flavors: dict[str, object]
    :param datastore_id: The asset datastoreId.
    :type datastore_id: str
    :param asset_path: DEPRECATED - use
     Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead.
    :type asset_path: ~azure_machine_learning_workspaces.models.AssetPath
    :param path: The path of the file/directory.
    :type path: str
    :param generated_by: If the name version are system generated (anonymous registration) or user
     generated. Possible values include: "User", "System".
    :type generated_by: str or ~azure_machine_learning_workspaces.models.AssetGenerator
    :param description: The asset description text.
    :type description: str
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    """

    # 'readonly' fields are server-populated and never serialized on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
    }

    # 'properties.*' keys flatten the nested ARM "properties" object on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'stage': {'key': 'properties.stage', 'type': 'str'},
        'flavors': {'key': 'properties.flavors', 'type': '{object}'},
        'datastore_id': {'key': 'properties.datastoreId', 'type': 'str'},
        'asset_path': {'key': 'properties.assetPath', 'type': 'AssetPath'},
        'path': {'key': 'properties.path', 'type': 'str'},
        'generated_by': {'key': 'properties.generatedBy', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'tags': {'key': 'properties.tags', 'type': '{str}'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        stage: Optional[str] = None,
        flavors: Optional[Dict[str, object]] = None,
        datastore_id: Optional[str] = None,
        asset_path: Optional["AssetPath"] = None,
        path: Optional[str] = None,
        generated_by: Optional[Union[str, "AssetGenerator"]] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super(ModelVersionResource, self).__init__(**kwargs)
        # Read-only (server-populated) fields start as None on the client.
        self.id = None
        self.name = None
        self.type = None
        self.system_data = None
        self.stage = stage
        self.flavors = flavors
        self.datastore_id = datastore_id
        self.asset_path = asset_path
        self.path = path
        self.generated_by = generated_by
        self.description = description
        self.tags = tags
        self.properties = properties
+
+
class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of ModelVersion entities.

    :param value: An array of objects of type ModelVersion.
    :type value: list[~azure_machine_learning_workspaces.models.ModelVersionResource]
    :param next_link: Link to the next page of results; None on the last page.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ModelVersionResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["ModelVersionResource"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
+
+
class Mpi(DistributionConfiguration):
    """Mpi.

    All required parameters must be populated in order to send to Azure.

    :param distribution_type: Required. Specifies the type of distribution framework.Constant
     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
    :param process_count_per_instance:
    :type process_count_per_instance: int
    """

    _validation = {
        'distribution_type': {'required': True},
    }

    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
        'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        process_count_per_instance: Optional[int] = None,
        **kwargs
    ):
        super(Mpi, self).__init__(**kwargs)
        # Polymorphic discriminator: fixed to 'Mpi' for this subtype, so it is
        # not accepted as a constructor argument.
        self.distribution_type = 'Mpi'  # type: str
        self.process_count_per_instance = process_count_per_instance
+
+
class NodeStateCounts(msrest.serialization.Model):
    """Counts of various compute node states on the amlCompute.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar idle_node_count: Number of compute nodes in idle state.
    :vartype idle_node_count: int
    :ivar running_node_count: Number of compute nodes which are running jobs.
    :vartype running_node_count: int
    :ivar preparing_node_count: Number of compute nodes which are being prepared.
    :vartype preparing_node_count: int
    :ivar unusable_node_count: Number of compute nodes which are in unusable state.
    :vartype unusable_node_count: int
    :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
    :vartype leaving_node_count: int
    :ivar preempted_node_count: Number of compute nodes which are in preempted state.
    :vartype preempted_node_count: int
    """

    # Every field is read-only: this model is response-only.
    _validation = {
        'idle_node_count': {'readonly': True},
        'running_node_count': {'readonly': True},
        'preparing_node_count': {'readonly': True},
        'unusable_node_count': {'readonly': True},
        'leaving_node_count': {'readonly': True},
        'preempted_node_count': {'readonly': True},
    }

    _attribute_map = {
        'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
        'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
        'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
        'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
        'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
        'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(NodeStateCounts, self).__init__(**kwargs)
        # All counts are filled in by deserialization of server responses.
        self.idle_node_count = None
        self.running_node_count = None
        self.preparing_node_count = None
        self.unusable_node_count = None
        self.leaving_node_count = None
        self.preempted_node_count = None
+
+
class NotebookListCredentialsResult(msrest.serialization.Model):
    """NotebookListCredentialsResult.

    :param primary_access_key: Primary access key for the notebook resource.
    :type primary_access_key: str
    :param secondary_access_key: Secondary access key for the notebook resource.
    :type secondary_access_key: str
    """

    _attribute_map = {
        'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
        'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        primary_access_key: Optional[str] = None,
        secondary_access_key: Optional[str] = None,
        **kwargs
    ):
        super(NotebookListCredentialsResult, self).__init__(**kwargs)
        self.primary_access_key = primary_access_key
        self.secondary_access_key = secondary_access_key
+
+
class NotebookPreparationError(msrest.serialization.Model):
    """NotebookPreparationError.

    :param error_message: Message describing the notebook preparation failure.
    :type error_message: str
    :param status_code: HTTP-style status code associated with the error.
    :type status_code: int
    """

    _attribute_map = {
        'error_message': {'key': 'errorMessage', 'type': 'str'},
        'status_code': {'key': 'statusCode', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        error_message: Optional[str] = None,
        status_code: Optional[int] = None,
        **kwargs
    ):
        super(NotebookPreparationError, self).__init__(**kwargs)
        self.error_message = error_message
        self.status_code = status_code
+
+
class NotebookResourceInfo(msrest.serialization.Model):
    """NotebookResourceInfo.

    :param fqdn: Fully qualified domain name of the notebook resource.
    :type fqdn: str
    :param resource_id: the data plane resourceId that used to initialize notebook component.
    :type resource_id: str
    :param notebook_preparation_error: The error that occurs when preparing notebook.
    :type notebook_preparation_error:
     ~azure_machine_learning_workspaces.models.NotebookPreparationError
    """

    _attribute_map = {
        'fqdn': {'key': 'fqdn', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
    }

    def __init__(
        self,
        *,
        fqdn: Optional[str] = None,
        resource_id: Optional[str] = None,
        notebook_preparation_error: Optional["NotebookPreparationError"] = None,
        **kwargs
    ):
        super(NotebookResourceInfo, self).__init__(**kwargs)
        self.fqdn = fqdn
        self.resource_id = resource_id
        self.notebook_preparation_error = notebook_preparation_error
+
+
class OnlineDeploymentScaleSettings(msrest.serialization.Model):
    """OnlineDeploymentScaleSettings.

    :param minimum: Lower bound on instance count.
    :type minimum: int
    :param maximum: Upper bound on instance count.
    :type maximum: int
    :param instance_count: Current/target instance count.
    :type instance_count: int
    :param scale_type: Possible values include: "Automatic", "Manual", "None".
    :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleTypeMode
    """

    _attribute_map = {
        'minimum': {'key': 'minimum', 'type': 'int'},
        'maximum': {'key': 'maximum', 'type': 'int'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'scale_type': {'key': 'scaleType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        minimum: Optional[int] = None,
        maximum: Optional[int] = None,
        instance_count: Optional[int] = None,
        scale_type: Optional[Union[str, "ScaleTypeMode"]] = None,
        **kwargs
    ):
        super(OnlineDeploymentScaleSettings, self).__init__(**kwargs)
        self.minimum = minimum
        self.maximum = maximum
        self.instance_count = instance_count
        self.scale_type = scale_type
+
+
class OnlineDeploymentTrackedResource(msrest.serialization.Model):
    """OnlineDeploymentTrackedResource.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param tags: A set of tags. Dictionary of :code:``.
    :type tags: dict[str, str]
    :param location: Required.
    :type location: str
    :param kind:
    :type kind: str
    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :param scale_settings:
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineDeploymentScaleSettings
    :param deployment_configuration: Required.
    :type deployment_configuration:
     ~azure_machine_learning_workspaces.models.DeploymentConfigurationBase
    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
    :param description: Description of the endpoint deployment.
    :type description: str
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :param model_reference: Required.
    :type model_reference: ~azure_machine_learning_workspaces.models.AssetReferenceBase
    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
    :param environment_id: Environment specification for the endpoint deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the deployment.
    :type environment_variables: dict[str, str]
    """

    # 'location' additionally carries a regex constraint enforced at
    # serialization time by msrest.
    _validation = {
        'location': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'deployment_configuration': {'required': True},
        'provisioning_state': {'readonly': True},
        'model_reference': {'required': True},
    }

    # 'properties.*' keys flatten the nested ARM "properties" object on the wire.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'OnlineDeploymentScaleSettings'},
        'deployment_configuration': {'key': 'properties.deploymentConfiguration', 'type': 'DeploymentConfigurationBase'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
        'model_reference': {'key': 'properties.modelReference', 'type': 'AssetReferenceBase'},
        'code_configuration': {'key': 'properties.codeConfiguration', 'type': 'CodeConfiguration'},
        'environment_id': {'key': 'properties.environmentId', 'type': 'str'},
        'environment_variables': {'key': 'properties.environmentVariables', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        location: str,
        deployment_configuration: "DeploymentConfigurationBase",
        model_reference: "AssetReferenceBase",
        tags: Optional[Dict[str, str]] = None,
        kind: Optional[str] = None,
        identity: Optional["ResourceIdentity"] = None,
        scale_settings: Optional["OnlineDeploymentScaleSettings"] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        code_configuration: Optional["CodeConfiguration"] = None,
        environment_id: Optional[str] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super(OnlineDeploymentTrackedResource, self).__init__(**kwargs)
        self.tags = tags
        self.location = location
        self.kind = kind
        self.identity = identity
        # Read-only (server-populated) fields start as None on the client.
        self.id = None
        self.name = None
        self.type = None
        self.system_data = None
        self.scale_settings = scale_settings
        self.deployment_configuration = deployment_configuration
        self.provisioning_state = None
        self.description = description
        self.properties = properties
        self.model_reference = model_reference
        self.code_configuration = code_configuration
        self.environment_id = environment_id
        self.environment_variables = environment_variables
+
+
class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of OnlineDeployment entities.

    :param value: An array of objects of type OnlineDeployment.
    :type value: list[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
    :param next_link: Link to the next page of results; None on the last page.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[OnlineDeploymentTrackedResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["OnlineDeploymentTrackedResource"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
+
+
class OnlineEndpointTrackedResource(msrest.serialization.Model):
    """OnlineEndpointTrackedResource.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param tags: A set of tags. Dictionary of :code:``.
    :type tags: dict[str, str]
    :param location: Required.
    :type location: str
    :param kind:
    :type kind: str
    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :ivar id: The resource URL of the entity (not URL encoded).
    :vartype id: str
    :ivar name: The name of the resource entity.
    :vartype name: str
    :ivar type: The resource provider and type.
    :vartype type: str
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :ivar provisioning_state: State of endpoint provisioning. Possible values include: "Creating",
     "Deleting", "Succeeded", "Failed", "Updating", "Canceled".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.EndpointProvisioningState
    :param description: Description of the inference endpoint.
    :type description: str
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :param traffic_rules: Traffic rules on how the traffic will be routed across deployments.
    :type traffic_rules: dict[str, int]
    :param compute_configuration: Required.
    :type compute_configuration: ~azure_machine_learning_workspaces.models.ComputeConfiguration
    :ivar endpoint: Endpoint URI.
    :vartype endpoint: str
    :ivar swagger_endpoint: Endpoint Swagger URI.
    :vartype swagger_endpoint: str
    :param auth_mode: Required. Inference endpoint authentication mode type. Possible values
     include: "AMLToken", "Key", "AADToken".
    :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthModeType
    """

    # 'location' additionally carries a regex constraint enforced at
    # serialization time by msrest.
    _validation = {
        'location': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'compute_configuration': {'required': True},
        'endpoint': {'readonly': True},
        'swagger_endpoint': {'readonly': True},
        'auth_mode': {'required': True},
    }

    # 'properties.*' keys flatten the nested ARM "properties" object on the wire.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
        'traffic_rules': {'key': 'properties.trafficRules', 'type': '{int}'},
        'compute_configuration': {'key': 'properties.computeConfiguration', 'type': 'ComputeConfiguration'},
        'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
        'swagger_endpoint': {'key': 'properties.swaggerEndpoint', 'type': 'str'},
        'auth_mode': {'key': 'properties.authMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        location: str,
        compute_configuration: "ComputeConfiguration",
        auth_mode: Union[str, "EndpointAuthModeType"],
        tags: Optional[Dict[str, str]] = None,
        kind: Optional[str] = None,
        identity: Optional["ResourceIdentity"] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        traffic_rules: Optional[Dict[str, int]] = None,
        **kwargs
    ):
        super(OnlineEndpointTrackedResource, self).__init__(**kwargs)
        self.tags = tags
        self.location = location
        self.kind = kind
        self.identity = identity
        # Read-only (server-populated) fields start as None on the client.
        self.id = None
        self.name = None
        self.type = None
        self.system_data = None
        self.provisioning_state = None
        self.description = description
        self.properties = properties
        self.traffic_rules = traffic_rules
        self.compute_configuration = compute_configuration
        self.endpoint = None
        self.swagger_endpoint = None
        self.auth_mode = auth_mode
+
+
class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of OnlineEndpoint entities.

    :param value: An array of objects of type OnlineEndpoint.
    :type value: list[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
    :param next_link: Link to the next page of results; None on the last page.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[OnlineEndpointTrackedResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["OnlineEndpointTrackedResource"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
+
+
class Operation(msrest.serialization.Model):
    """Azure Machine Learning workspace REST API operation.

    :param name: Operation name: {provider}/{resource}/{operation}.
    :type name: str
    :param display: Display name of operation.
    :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display: Optional["OperationDisplay"] = None,
        **kwargs
    ):
        super(Operation, self).__init__(**kwargs)
        self.name = name
        self.display = display
+
+
class OperationDisplay(msrest.serialization.Model):
    """Display name of operation.

    :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
    :type provider: str
    :param resource: The resource on which the operation is performed.
    :type resource: str
    :param operation: The operation that users can perform.
    :type operation: str
    :param description: The description for the operation.
    :type description: str
    """

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        provider: Optional[str] = None,
        resource: Optional[str] = None,
        operation: Optional[str] = None,
        description: Optional[str] = None,
        **kwargs
    ):
        super(OperationDisplay, self).__init__(**kwargs)
        self.provider = provider
        self.resource = resource
        self.operation = operation
        self.description = description
+
+
class OperationListResult(msrest.serialization.Model):
    """An array of operations supported by the resource provider.

    :param value: List of AML workspace operations supported by the AML workspace resource
     provider.
    :type value: list[~azure_machine_learning_workspaces.models.Operation]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Operation]'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Operation"]] = None,
        **kwargs
    ):
        super(OperationListResult, self).__init__(**kwargs)
        self.value = value
+
+
class OutputData(msrest.serialization.Model):
    """OutputData.

    :param dataset_name: Output dataset name.
    :type dataset_name: str
    :param datastore: Datastore location for output data.
    :type datastore: str
    :param datapath: Path location within the datastore for output data.
    :type datapath: str
    :param mode: Mode type for data. Possible values include: "Mount", "Download", "Upload".
    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
    """

    _attribute_map = {
        'dataset_name': {'key': 'datasetName', 'type': 'str'},
        'datastore': {'key': 'datastore', 'type': 'str'},
        'datapath': {'key': 'datapath', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        dataset_name: Optional[str] = None,
        datastore: Optional[str] = None,
        datapath: Optional[str] = None,
        mode: Optional[Union[str, "DataBindingMode"]] = None,
        **kwargs
    ):
        super(OutputData, self).__init__(**kwargs)
        self.dataset_name = dataset_name
        self.datastore = datastore
        self.datapath = datapath
        self.mode = mode
+
+
class OutputPathAssetReference(AssetReferenceBase):
    """OutputPathAssetReference.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
     server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param path: Output path within the job referenced by ``job_id``.
    :type path: str
    :param job_id: Identifier of the job producing the output.
    :type job_id: str
    """

    _validation = {
        'reference_type': {'required': True},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
        'job_id': {'key': 'jobId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        path: Optional[str] = None,
        job_id: Optional[str] = None,
        **kwargs
    ):
        super(OutputPathAssetReference, self).__init__(**kwargs)
        # Polymorphic discriminator: fixed to 'OutputPath' for this subtype,
        # so it is not accepted as a constructor argument.
        self.reference_type = 'OutputPath'  # type: str
        self.path = path
        self.job_id = job_id
+
+
class PaginatedComputeResourcesList(msrest.serialization.Model):
    """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.

    :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
    :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
    :param next_link: A continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ComputeResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["ComputeResource"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(PaginatedComputeResourcesList, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
+
+
class PaginatedServiceList(msrest.serialization.Model):
    """Paginated list of Machine Learning service objects wrapped in ARM resource envelope.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
    :vartype value: list[~azure_machine_learning_workspaces.models.ServiceResource]
    :ivar next_link: A continuation link (absolute URI) to the next page of results in the list.
    :vartype next_link: str
    """

    # Unlike the other paginated-result models here, both fields are
    # read-only: this list is response-only.
    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ServiceResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PaginatedServiceList, self).__init__(**kwargs)
        # Filled in by deserialization of server responses.
        self.value = None
        self.next_link = None
+
+
class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
    """Paginated list of Workspace connection objects.

    :param value: An array of Workspace connection objects.
    :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
    :param next_link: A continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["WorkspaceConnection"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
+
+
class ParameterSamplingConfiguration(msrest.serialization.Model):
    """class for all hyperparameter sampling algorithms.

    All required parameters must be populated in order to send to Azure.

    :param parameter_space: Required. A dictionary containing each parameter and its distribution.
     The dictionary key is the name of the parameter.
    :type parameter_space: object
    :param sampling_type: Required. Type of the hyperparameter sampling algorithms. Possible values
     include: "Grid", "Random", "Bayesian".
    :type sampling_type: str or ~azure_machine_learning_workspaces.models.ParameterSamplingType
    """

    # Both fields are required; neither has a constructor default.
    _validation = {
        'parameter_space': {'required': True},
        'sampling_type': {'required': True},
    }

    _attribute_map = {
        'parameter_space': {'key': 'parameterSpace', 'type': 'object'},
        'sampling_type': {'key': 'samplingType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        parameter_space: object,
        sampling_type: Union[str, "ParameterSamplingType"],
        **kwargs
    ):
        super(ParameterSamplingConfiguration, self).__init__(**kwargs)
        self.parameter_space = parameter_space
        self.sampling_type = sampling_type
+
+
class PartialOnlineDeployment(msrest.serialization.Model):
    """Mutable online deployment configuration.

    :param scale_settings: Scale settings to patch on the deployment.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineDeploymentScaleSettings
    :param deployment_configuration: Deployment configuration to patch.
    :type deployment_configuration:
     ~azure_machine_learning_workspaces.models.DeploymentConfigurationBase
    """

    # Note: keys here are NOT prefixed with 'properties.' — this model is the
    # payload nested under 'properties' in the partial (PATCH) envelope.
    _attribute_map = {
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineDeploymentScaleSettings'},
        'deployment_configuration': {'key': 'deploymentConfiguration', 'type': 'DeploymentConfigurationBase'},
    }

    def __init__(
        self,
        *,
        scale_settings: Optional["OnlineDeploymentScaleSettings"] = None,
        deployment_configuration: Optional["DeploymentConfigurationBase"] = None,
        **kwargs
    ):
        super(PartialOnlineDeployment, self).__init__(**kwargs)
        self.scale_settings = scale_settings
        self.deployment_configuration = deployment_configuration
+
+
+class PartialOnlineDeploymentPartialTrackedResource(msrest.serialization.Model):
+    """Partial (PATCH) tracked-resource envelope for an online deployment.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param tags: A set of tags. Resource tags as key/value pairs.
+    :type tags: dict[str, str]
+    :param location:
+    :type location: str
+    :param kind:
+    :type kind: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :param properties: Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineDeployment
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'readonly' fields are never serialized into the request body.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Maps python attribute -> (JSON wire key, msrest type name).
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tags: Optional[Dict[str, str]] = None,
+        location: Optional[str] = None,
+        kind: Optional[str] = None,
+        identity: Optional["ResourceIdentity"] = None,
+        properties: Optional["PartialOnlineDeployment"] = None,
+        **kwargs
+    ):
+        super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs)
+        self.tags = tags
+        self.location = location
+        self.kind = kind
+        self.identity = identity
+        # Server-populated; always None on the client until deserialized.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.properties = properties
+        self.system_data = None
+
+
+class PartialOnlineEndpoint(msrest.serialization.Model):
+ """Mutable online endpoint configuration.
+
+ :param traffic_rules: Traffic rules on how the traffic will be routed across deployments.
+ :type traffic_rules: dict[str, int]
+ """
+
+ _attribute_map = {
+ 'traffic_rules': {'key': 'trafficRules', 'type': '{int}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ traffic_rules: Optional[Dict[str, int]] = None,
+ **kwargs
+ ):
+ super(PartialOnlineEndpoint, self).__init__(**kwargs)
+ self.traffic_rules = traffic_rules
+
+
+class PartialOnlineEndpointPartialTrackedResource(msrest.serialization.Model):
+    """Partial (PATCH) tracked-resource envelope for an online endpoint.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param tags: A set of tags. Resource tags as key/value pairs.
+    :type tags: dict[str, str]
+    :param location:
+    :type location: str
+    :param kind:
+    :type kind: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :ivar id: The resource URL of the entity (not URL encoded).
+    :vartype id: str
+    :ivar name: The name of the resource entity.
+    :vartype name: str
+    :ivar type: The resource provider and type.
+    :vartype type: str
+    :param properties: Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineEndpoint
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'readonly' fields are never serialized into the request body.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Maps python attribute -> (JSON wire key, msrest type name).
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tags: Optional[Dict[str, str]] = None,
+        location: Optional[str] = None,
+        kind: Optional[str] = None,
+        identity: Optional["ResourceIdentity"] = None,
+        properties: Optional["PartialOnlineEndpoint"] = None,
+        **kwargs
+    ):
+        super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs)
+        self.tags = tags
+        self.location = location
+        self.kind = kind
+        self.identity = identity
+        # Server-populated; always None on the client until deserialized.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.properties = properties
+        self.system_data = None
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ *,
+ assigned_user: Optional["AssignedUser"] = None,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = assigned_user
+
+
+class Pipeline(msrest.serialization.Model):
+    """Pipeline graph definition: component jobs plus their data inputs/outputs.
+
+    :param continue_run_on_step_failure: Flag when set, continue pipeline execution if a step
+     fails.
+    :type continue_run_on_step_failure: bool
+    :param default_datastore_name: Default datastore name shared by all pipeline jobs.
+    :type default_datastore_name: str
+    :param component_jobs: JobDefinition set for PipelineStepJobs.
+    :type component_jobs: dict[str, ~azure_machine_learning_workspaces.models.ComponentJob]
+    :param inputs: Data input set for jobs.
+    :type inputs: dict[str, ~azure_machine_learning_workspaces.models.PipelineInput]
+    :param outputs: Data output set for jobs.
+    :type outputs: dict[str, ~azure_machine_learning_workspaces.models.PipelineOutput]
+    """
+
+    # Maps python attribute -> (JSON wire key, msrest type name); '{X}' means dict-of-X.
+    _attribute_map = {
+        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
+        'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
+        'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
+        'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
+        'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        continue_run_on_step_failure: Optional[bool] = None,
+        default_datastore_name: Optional[str] = None,
+        component_jobs: Optional[Dict[str, "ComponentJob"]] = None,
+        inputs: Optional[Dict[str, "PipelineInput"]] = None,
+        outputs: Optional[Dict[str, "PipelineOutput"]] = None,
+        **kwargs
+    ):
+        super(Pipeline, self).__init__(**kwargs)
+        self.continue_run_on_step_failure = continue_run_on_step_failure
+        self.default_datastore_name = default_datastore_name
+        self.component_jobs = component_jobs
+        self.inputs = inputs
+        self.outputs = outputs
+
+
+class PipelineInput(msrest.serialization.Model):
+ """PipelineInput.
+
+ :param data: Input data definition.
+ :type data: ~azure_machine_learning_workspaces.models.InputData
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'InputData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data: Optional["InputData"] = None,
+ **kwargs
+ ):
+ super(PipelineInput, self).__init__(**kwargs)
+ self.data = data
+
+
+class PipelineJob(ComputeJobBase):
+    """Pipeline Job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+     placed in the "Default" experiment.
+    :type experiment_name: str
+    :param compute_binding: Required. Compute binding for the job.
+    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+    :ivar output: Location of the job output logs and artifacts.
+    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+     Private preview is only for whitelisted customers.
+    :type priority: int
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :param pipeline_type: Type of PipelineJob. Possible values include: "AzureML".
+    :type pipeline_type: str or ~azure_machine_learning_workspaces.models.PipelineType
+    :param pipeline: Pipeline details.
+    :type pipeline: ~azure_machine_learning_workspaces.models.Pipeline
+    """
+
+    # Flattened constraints for the whole inheritance chain.
+    _validation = {
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'compute_binding': {'required': True},
+        'output': {'readonly': True},
+        'status': {'readonly': True},
+    }
+
+    # Flattened wire mapping for the whole inheritance chain.
+    _attribute_map = {
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+        'output': {'key': 'output', 'type': 'JobOutput'},
+        'priority': {'key': 'priority', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
+        'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_binding: "ComputeBinding",
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        experiment_name: Optional[str] = None,
+        priority: Optional[int] = None,
+        pipeline_type: Optional[Union[str, "PipelineType"]] = None,
+        pipeline: Optional["Pipeline"] = None,
+        **kwargs
+    ):
+        # NOTE(review): provisioning_state / interaction_endpoints / output are
+        # presumably initialized by ComputeJobBase.__init__ — confirm against the base class.
+        super(PipelineJob, self).__init__(description=description, tags=tags, properties=properties, experiment_name=experiment_name, compute_binding=compute_binding, priority=priority, **kwargs)
+        # Polymorphic discriminator: this subclass always serializes as "Pipeline".
+        self.job_type = 'Pipeline'  # type: str
+        self.status = None
+        self.pipeline_type = pipeline_type
+        self.pipeline = pipeline
+
+
+class PipelineOutput(msrest.serialization.Model):
+ """PipelineOutput.
+
+ :param data: Output data definition.
+ :type data: ~azure_machine_learning_workspaces.models.OutputData
+ """
+
+ _attribute_map = {
+ 'data': {'key': 'data', 'type': 'OutputData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data: Optional["OutputData"] = None,
+ **kwargs
+ ):
+ super(PipelineOutput, self).__init__(**kwargs)
+ self.data = data
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+    """The Private Endpoint resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The ARM identifier for Private Endpoint.
+    :vartype id: str
+    """
+
+    # Read-only: never serialized into a request.
+    _validation = {
+        'id': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateEndpoint, self).__init__(**kwargs)
+        # Server-populated ARM id.
+        self.id = None
+
+
+class PrivateEndpointConnection(Resource):
+    """The Private Endpoint Connection resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar name: Specifies the name of the resource.
+    :vartype name: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :ivar type: Specifies the type of the resource.
+    :vartype type: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :param private_endpoint: The resource of private end point.
+    :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+    :param private_link_service_connection_state: A collection of information about the state of
+     the connection between service consumer and provider.
+    :type private_link_service_connection_state:
+     ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+    :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+     Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Dotted keys ('properties.x') flatten the nested ARM 'properties' bag
+    # onto this model; msrest re-nests them on serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+        'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        private_endpoint: Optional["PrivateEndpoint"] = None,
+        private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
+        **kwargs
+    ):
+        # Base Resource handles the common ARM envelope (id/name/type set there).
+        super(PrivateEndpointConnection, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+        self.private_endpoint = private_endpoint
+        self.private_link_service_connection_state = private_link_service_connection_state
+        # Server-populated.
+        self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+    """A private link resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar name: Specifies the name of the resource.
+    :vartype name: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :ivar type: Specifies the type of the resource.
+    :vartype type: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :ivar group_id: The private link resource group id.
+    :vartype group_id: str
+    :ivar required_members: The private link resource required member names.
+    :vartype required_members: list[str]
+    :param required_zone_names: The private link resource Private link DNS zone name.
+    :type required_zone_names: list[str]
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'group_id': {'readonly': True},
+        'required_members': {'readonly': True},
+    }
+
+    # Dotted keys flatten the nested ARM 'properties' bag onto this model.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'group_id': {'key': 'properties.groupId', 'type': 'str'},
+        'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+        'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        required_zone_names: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(PrivateLinkResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+        # Server-populated.
+        self.group_id = None
+        self.required_members = None
+        self.required_zone_names = required_zone_names
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["PrivateLinkResource"]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+    """A collection of information about the state of the connection between service consumer and provider.
+
+    :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+     of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+     "Timeout".
+    :type status: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+    :param description: The reason for approval/rejection of the connection.
+    :type description: str
+    :param actions_required: A message indicating if changes on the service provider require any
+     updates on the consumer.
+    :type actions_required: str
+    """
+
+    # Maps python attribute -> (JSON wire key, msrest type name).
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+        description: Optional[str] = None,
+        actions_required: Optional[str] = None,
+        **kwargs
+    ):
+        super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+        self.status = status
+        self.description = description
+        self.actions_required = actions_required
+
+
+class ProgressMetrics(msrest.serialization.Model):
+    """Progress metrics definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar total_datapoint_count: The total datapoint count.
+    :vartype total_datapoint_count: long
+    :ivar completed_datapoint_count: The completed datapoint count.
+    :vartype completed_datapoint_count: long
+    :ivar skipped_datapoint_count: The skipped datapoint count.
+    :vartype skipped_datapoint_count: long
+    :ivar incremental_dataset_last_refresh_time: The time of last successful incremental dataset
+     refresh in UTC.
+    :vartype incremental_dataset_last_refresh_time: ~datetime.datetime
+    """
+
+    # Everything here is server-computed and read-only.
+    _validation = {
+        'total_datapoint_count': {'readonly': True},
+        'completed_datapoint_count': {'readonly': True},
+        'skipped_datapoint_count': {'readonly': True},
+        'incremental_dataset_last_refresh_time': {'readonly': True},
+    }
+
+    # 'long' and 'iso-8601' are msrest wire types (int and datetime in python).
+    _attribute_map = {
+        'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'},
+        'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'},
+        'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'},
+        'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ProgressMetrics, self).__init__(**kwargs)
+        self.total_datapoint_count = None
+        self.completed_datapoint_count = None
+        self.skipped_datapoint_count = None
+        self.incremental_dataset_last_refresh_time = None
+
+
+class PyTorch(DistributionConfiguration):
+    """PyTorch distributed-training configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework.Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param process_count: Total process count for the distributed job.
+    :type process_count: int
+    """
+
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'process_count': {'key': 'processCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        process_count: Optional[int] = None,
+        **kwargs
+    ):
+        super(PyTorch, self).__init__(**kwargs)
+        # Polymorphic discriminator: this subclass always serializes as "PyTorch".
+        self.distribution_type = 'PyTorch'  # type: str
+        self.process_count = process_count
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+    """The properties for Quota update or retrieval.
+
+    :param id: Specifies the resource ID.
+    :type id: str
+    :param type: Specifies the resource type.
+    :type type: str
+    :param limit: The maximum permitted quota of the resource.
+    :type limit: long
+    :param unit: An enum describing the unit of quota measurement. Possible values include:
+     "Count".
+    :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    :param location: Region of the AML workspace in the id.
+    :type location: str
+    """
+
+    # Maps python attribute -> (JSON wire key, msrest type name).
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    # NOTE: 'id' and 'type' shadow builtins, but the names mirror the wire
+    # contract and cannot be changed without breaking callers.
+    def __init__(
+        self,
+        *,
+        id: Optional[str] = None,
+        type: Optional[str] = None,
+        limit: Optional[int] = None,
+        unit: Optional[Union[str, "QuotaUnit"]] = None,
+        location: Optional[str] = None,
+        **kwargs
+    ):
+        super(QuotaBaseProperties, self).__init__(**kwargs)
+        self.id = id
+        self.type = type
+        self.limit = limit
+        self.unit = unit
+        self.location = location
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["QuotaBaseProperties"]] = None,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = value
+
+
+class RCranPackage(msrest.serialization.Model):
+ """RCranPackage.
+
+ :param name: The package name.
+ :type name: str
+ :param repository: The repository name.
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ repository: Optional[str] = None,
+ **kwargs
+ ):
+ super(RCranPackage, self).__init__(**kwargs)
+ self.name = name
+ self.repository = repository
+
+
+class RegenerateEndpointKeysRequest(msrest.serialization.Model):
+    """Request body for regenerating an endpoint authentication key.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param key_type: Required. Specification for which type of key to generate. Primary or
+     Secondary. Possible values include: "Primary", "Secondary".
+    :type key_type: str or ~azure_machine_learning_workspaces.models.KeyType
+    :param key_value: The value the key is set to.
+    :type key_value: str
+    """
+
+    _validation = {
+        'key_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'key_type': {'key': 'keyType', 'type': 'str'},
+        'key_value': {'key': 'keyValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key_type: Union[str, "KeyType"],
+        key_value: Optional[str] = None,
+        **kwargs
+    ):
+        super(RegenerateEndpointKeysRequest, self).__init__(**kwargs)
+        self.key_type = key_type
+        self.key_value = key_value
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+    """Container-registry credentials returned by a listCredentials call.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location:
+    :vartype location: str
+    :ivar username:
+    :vartype username: str
+    :param passwords:
+    :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+    """
+
+    _validation = {
+        'location': {'readonly': True},
+        'username': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'username': {'key': 'username', 'type': 'str'},
+        'passwords': {'key': 'passwords', 'type': '[Password]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        passwords: Optional[List["Password"]] = None,
+        **kwargs
+    ):
+        super(RegistryListCredentialsResult, self).__init__(**kwargs)
+        # Server-populated.
+        self.location = None
+        self.username = None
+        self.passwords = passwords
+
+
+class ResourceId(msrest.serialization.Model):
+    """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Required. The ID of the resource.
+    :type id: str
+    """
+
+    _validation = {
+        'id': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    # NOTE: parameter name 'id' shadows the builtin but mirrors the wire contract.
+    def __init__(
+        self,
+        *,
+        id: str,
+        **kwargs
+    ):
+        super(ResourceId, self).__init__(**kwargs)
+        self.id = id
+
+
+class ResourceIdentity(msrest.serialization.Model):
+    """Service identity associated with a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param type: Defines values for a ResourceIdentity's type. Possible values include:
+     "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned", "None".
+    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityAssignment
+    :ivar principal_id: Oid that used as the "client_id" when authenticating.
+    :vartype principal_id: str
+    :ivar tenant_id: AAD Tenant where this identity lives.
+    :vartype tenant_id: str
+    :param user_assigned_identities: Dictionary of the user assigned identities, key is ResourceId
+     of the UAI.
+    :type user_assigned_identities: dict[str,
+     ~azure_machine_learning_workspaces.models.UserAssignedIdentityMeta]
+    """
+
+    _validation = {
+        'principal_id': {'readonly': True},
+        'tenant_id': {'readonly': True},
+    }
+
+    # '{X}' means dict-of-X in msrest type syntax.
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[Union[str, "ResourceIdentityAssignment"]] = None,
+        user_assigned_identities: Optional[Dict[str, "UserAssignedIdentityMeta"]] = None,
+        **kwargs
+    ):
+        super(ResourceIdentity, self).__init__(**kwargs)
+        self.type = type
+        # Server-populated AAD identifiers.
+        self.principal_id = None
+        self.tenant_id = None
+        self.user_assigned_identities = user_assigned_identities
+
+
+class ResourceName(msrest.serialization.Model):
+    """The Resource Name.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    # Both fields are server-populated and read-only.
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceName, self).__init__(**kwargs)
+        self.value = None
+        self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+    """The quota assigned to a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar location: Region of the AML workspace in the id.
+    :vartype location: str
+    :ivar type: Specifies the resource type.
+    :vartype type: str
+    :ivar name: Name of the resource.
+    :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+    :ivar limit: The maximum permitted quota of the resource.
+    :vartype limit: long
+    :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+    :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    """
+
+    # Everything here is server-computed and read-only.
+    _validation = {
+        'id': {'readonly': True},
+        'location': {'readonly': True},
+        'type': {'readonly': True},
+        'name': {'readonly': True},
+        'limit': {'readonly': True},
+        'unit': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'ResourceName'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceQuota, self).__init__(**kwargs)
+        self.id = None
+        self.location = None
+        self.type = None
+        self.name = None
+        self.limit = None
+        self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+    """Per-location availability information for a SKU.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location: Location of the SKU.
+    :vartype location: str
+    :ivar zones: List of availability zones where the SKU is supported.
+    :vartype zones: list[str]
+    :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+    :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+    """
+
+    # Everything here is server-computed and read-only.
+    _validation = {
+        'location': {'readonly': True},
+        'zones': {'readonly': True},
+        'zone_details': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'zones': {'key': 'zones', 'type': '[str]'},
+        'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+        self.location = None
+        self.zones = None
+        self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+    """Describes The zonal capabilities of a SKU.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+    :vartype name: list[str]
+    :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+     of zones.
+    :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+    """
+
+    # Every field is read-only: populated from service responses, never serialized out.
+    _validation = {
+        'name': {'readonly': True},
+        'capabilities': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': '[str]'},
+        'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+        # Read-only attributes start as None; the msrest deserializer fills them in.
+        self.name = None
+        self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+    """The restriction because of which SKU cannot be used.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location. This would
+     be different locations where the SKU is restricted.
+    :vartype values: list[str]
+    :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+     "NotAvailableForRegion", "NotAvailableForSubscription".
+    :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+    """
+
+    # 'type' and 'values' are output-only; only reason_code may be sent by the caller.
+    _validation = {
+        'type': {'readonly': True},
+        'values': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'values': {'key': 'values', 'type': '[str]'},
+        'reason_code': {'key': 'reasonCode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        reason_code: Optional[Union[str, "ReasonCode"]] = None,
+        **kwargs
+    ):
+        super(Restriction, self).__init__(**kwargs)
+        # Read-only attributes start as None; the msrest deserializer fills them in.
+        self.type = None
+        self.values = None
+        self.reason_code = reason_code
+
+
+class RGitHubPackage(msrest.serialization.Model):
+    """RGitHubPackage.
+
+    :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+    :type repository: str
+    :param auth_token: Personal access token to install from a private repo.
+    :type auth_token: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    # NOTE: auth_token is a secret; it is sent on requests but callers should avoid logging it.
+    _attribute_map = {
+        'repository': {'key': 'repository', 'type': 'str'},
+        'auth_token': {'key': 'authToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        repository: Optional[str] = None,
+        auth_token: Optional[str] = None,
+        **kwargs
+    ):
+        super(RGitHubPackage, self).__init__(**kwargs)
+        self.repository = repository
+        self.auth_token = auth_token
+
+
+class RGitHubPackageResponse(msrest.serialization.Model):
+    """RGitHubPackageResponse.
+
+    Response-side counterpart of RGitHubPackage: the service echoes only the
+    repository address and never returns the auth token.
+
+    :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+    :type repository: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'repository': {'key': 'repository', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        repository: Optional[str] = None,
+        **kwargs
+    ):
+        super(RGitHubPackageResponse, self).__init__(**kwargs)
+        self.repository = repository
+
+
+class Route(msrest.serialization.Model):
+    """Route.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param path: Required. The path for the route.
+    :type path: str
+    :param port: Required. The port for the route.
+    :type port: int
+    """
+
+    # Both fields are mandatory; 'path' must additionally match the service's pattern.
+    _validation = {
+        'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+        'port': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'path': {'key': 'path', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        path: str,
+        port: int,
+        **kwargs
+    ):
+        super(Route, self).__init__(**kwargs)
+        self.path = path
+        self.port = port
+
+
+class SasSection(msrest.serialization.Model):
+    """SasSection.
+
+    :param sas_token: Storage container SAS token.
+    :type sas_token: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    # NOTE: sas_token is a credential; avoid logging serialized instances.
+    _attribute_map = {
+        'sas_token': {'key': 'sasToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        sas_token: Optional[str] = None,
+        **kwargs
+    ):
+        super(SasSection, self).__init__(**kwargs)
+        self.sas_token = sas_token
+
+
+class ScaleSettings(msrest.serialization.Model):
+    """scale settings for AML Compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param max_node_count: Required. Max number of nodes to use.
+    :type max_node_count: int
+    :param min_node_count: Min number of nodes to use.
+    :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This
+     string needs to be in the RFC Format.
+    :type node_idle_time_before_scale_down: ~datetime.timedelta
+    """
+
+    # Only the upper bound is mandatory; min_node_count defaults to 0 (scale to zero).
+    _validation = {
+        'max_node_count': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    # 'duration' serializes the timedelta as an ISO 8601 duration string.
+    _attribute_map = {
+        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+        'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        max_node_count: int,
+        min_node_count: Optional[int] = 0,
+        node_idle_time_before_scale_down: Optional[datetime.timedelta] = None,
+        **kwargs
+    ):
+        super(ScaleSettings, self).__init__(**kwargs)
+        self.max_node_count = max_node_count
+        self.min_node_count = min_node_count
+        self.node_idle_time_before_scale_down = node_idle_time_before_scale_down
+
+
+class ScriptReference(msrest.serialization.Model):
+    """Script reference.
+
+    :param script_source: The storage source of the script: inline, workspace.
+    :type script_source: str
+    :param script_data: The location of scripts in the mounted volume.
+    :type script_data: str
+    :param script_arguments: Optional command line arguments passed to the script to run.
+    :type script_arguments: str
+    :param timeout: Optional time period passed to timeout command.
+    :type timeout: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'script_source': {'key': 'scriptSource', 'type': 'str'},
+        'script_data': {'key': 'scriptData', 'type': 'str'},
+        'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+        'timeout': {'key': 'timeout', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        script_source: Optional[str] = None,
+        script_data: Optional[str] = None,
+        script_arguments: Optional[str] = None,
+        timeout: Optional[str] = None,
+        **kwargs
+    ):
+        super(ScriptReference, self).__init__(**kwargs)
+        self.script_source = script_source
+        self.script_data = script_data
+        self.script_arguments = script_arguments
+        self.timeout = timeout
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+    """Customized setup scripts.
+
+    :param startup_script: Script that's run every time the machine starts.
+    :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    :param creation_script: Script that's run only once during provision of the compute.
+    :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    """
+
+    # Wire-format map: Python attribute -> JSON key and nested model type.
+    _attribute_map = {
+        'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+        'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+    }
+
+    def __init__(
+        self,
+        *,
+        startup_script: Optional["ScriptReference"] = None,
+        creation_script: Optional["ScriptReference"] = None,
+        **kwargs
+    ):
+        super(ScriptsToExecute, self).__init__(**kwargs)
+        self.startup_script = startup_script
+        self.creation_script = creation_script
+
+
+class ServicePrincipalConfiguration(IdentityConfiguration):
+    """ServicePrincipalConfiguration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+     server. Possible values include: "Managed", "ServicePrincipal", "AMLToken".
+    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+    :param secret: Required.
+    :type secret: str
+    """
+
+    # 'secret' is mandatory and pattern-checked; identity_type is the polymorphic discriminator.
+    _validation = {
+        'identity_type': {'required': True},
+        'secret': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'identity_type': {'key': 'identityType', 'type': 'str'},
+        'secret': {'key': 'secret', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        secret: str,
+        **kwargs
+    ):
+        super(ServicePrincipalConfiguration, self).__init__(**kwargs)
+        # Discriminator value is fixed for this subtype; callers never pass it.
+        self.identity_type = 'ServicePrincipal'  # type: str
+        self.secret = secret
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+    """Service principal credentials.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param client_id: Required. Client Id.
+    :type client_id: str
+    :param client_secret: Required. Client secret.
+    :type client_secret: str
+    """
+
+    # Both credential fields are mandatory on requests.
+    _validation = {
+        'client_id': {'required': True},
+        'client_secret': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'client_secret': {'key': 'clientSecret', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        client_id: str,
+        client_secret: str,
+        **kwargs
+    ):
+        super(ServicePrincipalCredentials, self).__init__(**kwargs)
+        self.client_id = client_id
+        self.client_secret = client_secret
+
+
+class ServicePrincipalSection(msrest.serialization.Model):
+    """ServicePrincipalSection.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authority_url: Authority URL used for authentication.
+    :type authority_url: str
+    :param resource_uri: Resource the service principal has access to.
+    :type resource_uri: str
+    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+    :type tenant_id: str
+    :param client_id: Required. Service principal client ID.
+    :type client_id: str
+    :param client_secret: Service principal secret.
+    :type client_secret: str
+    """
+
+    # Only tenant and client IDs are mandatory; the secret and URLs are optional.
+    _validation = {
+        'tenant_id': {'required': True},
+        'client_id': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'client_secret': {'key': 'clientSecret', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tenant_id: str,
+        client_id: str,
+        authority_url: Optional[str] = None,
+        resource_uri: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        **kwargs
+    ):
+        super(ServicePrincipalSection, self).__init__(**kwargs)
+        self.authority_url = authority_url
+        self.resource_uri = resource_uri
+        self.tenant_id = tenant_id
+        self.client_id = client_id
+        self.client_secret = client_secret
+
+
+class ServiceResource(Resource):
+    """Machine Learning service object wrapped into ARM resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar name: Specifies the name of the resource.
+    :vartype name: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :ivar type: Specifies the type of the resource.
+    :vartype type: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :param properties: Service properties.
+    :type properties: ~azure_machine_learning_workspaces.models.ServiceResponseBase
+    """
+
+    # ARM system fields are server-assigned and output-only.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'properties': {'key': 'properties', 'type': 'ServiceResponseBase'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        properties: Optional["ServiceResponseBase"] = None,
+        **kwargs
+    ):
+        # The common ARM envelope fields are handled by the Resource base class;
+        # this subtype only adds the service-specific 'properties' payload.
+        super(ServiceResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+        self.properties = properties
+
+
+class ServiceResponseBaseError(ErrorResponse):
+    """The error details.
+
+    Generated subtype of ErrorResponse: it adds no fields of its own and exists so
+    the 'error' property of a service response has a distinct model type.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar code: Error code.
+    :vartype code: str
+    :ivar message: Error message.
+    :vartype message: str
+    :ivar details: An array of error detail objects.
+    :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+    """
+
+    # Every field is read-only: populated from service responses, never serialized out.
+    _validation = {
+        'code': {'readonly': True},
+        'message': {'readonly': True},
+        'details': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+        'details': {'key': 'details', 'type': '[ErrorDetail]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ServiceResponseBaseError, self).__init__(**kwargs)
+
+
+class SetupScripts(msrest.serialization.Model):
+    """Details of customized scripts to execute for setting up the cluster.
+
+    :param scripts: Customized setup scripts.
+    :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+    """
+
+    # Wire-format map: Python attribute -> JSON key and nested model type.
+    _attribute_map = {
+        'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+    }
+
+    def __init__(
+        self,
+        *,
+        scripts: Optional["ScriptsToExecute"] = None,
+        **kwargs
+    ):
+        super(SetupScripts, self).__init__(**kwargs)
+        self.scripts = scripts
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+    """SharedPrivateLinkResource.
+
+    :param name: Unique name of the private link.
+    :type name: str
+    :param private_link_resource_id: The resource id that private link links to.
+    :type private_link_resource_id: str
+    :param group_id: The private link resource group id.
+    :type group_id: str
+    :param request_message: Request message.
+    :type request_message: str
+    :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+     of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+     "Timeout".
+    :type status: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+    """
+
+    # Wire-format map: the 'properties.*' keys flatten these attributes into a
+    # nested 'properties' object on the wire (msrest key-path syntax).
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+        'group_id': {'key': 'properties.groupId', 'type': 'str'},
+        'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+        'status': {'key': 'properties.status', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        private_link_resource_id: Optional[str] = None,
+        group_id: Optional[str] = None,
+        request_message: Optional[str] = None,
+        status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+        **kwargs
+    ):
+        super(SharedPrivateLinkResource, self).__init__(**kwargs)
+        self.name = name
+        self.private_link_resource_id = private_link_resource_id
+        self.group_id = group_id
+        self.request_message = request_message
+        self.status = status
+
+
+class Sku(msrest.serialization.Model):
+    """Sku of the resource.
+
+    :param name: Name of the sku.
+    :type name: str
+    :param tier: Tier of the sku like Basic or Enterprise.
+    :type tier: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'tier': {'key': 'tier', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        tier: Optional[str] = None,
+        **kwargs
+    ):
+        super(Sku, self).__init__(**kwargs)
+        self.name = name
+        self.tier = tier
+
+
+class SkuCapability(msrest.serialization.Model):
+    """Features/user capabilities associated with the sku.
+
+    :param name: Capability/Feature ID.
+    :type name: str
+    :param value: Details about the feature/capability.
+    :type value: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        value: Optional[str] = None,
+        **kwargs
+    ):
+        super(SkuCapability, self).__init__(**kwargs)
+        self.name = name
+        self.value = value
+
+
+class SkuListResult(msrest.serialization.Model):
+    """List of skus with features.
+
+    :param value:
+    :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+    :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+     URI to fetch the next page of Workspace Skus.
+    :type next_link: str
+    """
+
+    # Standard Azure paged-result envelope: items plus a continuation link.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["WorkspaceSku"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(SkuListResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class SparkMavenPackage(msrest.serialization.Model):
+    """SparkMavenPackage.
+
+    Maven coordinates (group:artifact:version) of a Spark package.
+
+    :param group:
+    :type group: str
+    :param artifact:
+    :type artifact: str
+    :param version:
+    :type version: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'group': {'key': 'group', 'type': 'str'},
+        'artifact': {'key': 'artifact', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        group: Optional[str] = None,
+        artifact: Optional[str] = None,
+        version: Optional[str] = None,
+        **kwargs
+    ):
+        super(SparkMavenPackage, self).__init__(**kwargs)
+        self.group = group
+        self.artifact = artifact
+        self.version = version
+
+
+class SqlAdminSection(msrest.serialization.Model):
+    """SqlAdminSection.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param user_id: Required. SQL database user name.
+    :type user_id: str
+    :param password: SQL database password.
+    :type password: str
+    """
+
+    # user_id is mandatory and pattern-checked; password is optional.
+    _validation = {
+        'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'user_id': {'key': 'userId', 'type': 'str'},
+        'password': {'key': 'password', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        user_id: str,
+        password: Optional[str] = None,
+        **kwargs
+    ):
+        super(SqlAdminSection, self).__init__(**kwargs)
+        self.user_id = user_id
+        self.password = password
+
+
+class SslConfiguration(msrest.serialization.Model):
+    """The ssl configuration for scoring.
+
+    :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+     "Enabled", "Auto".
+    :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+    :param cert: Cert data.
+    :type cert: str
+    :param key: Key data.
+    :type key: str
+    :param cname: CNAME of the cert.
+    :type cname: str
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    # NOTE: 'cert' and 'key' carry certificate/private-key material; handle as secrets.
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'cert': {'key': 'cert', 'type': 'str'},
+        'key': {'key': 'key', 'type': 'str'},
+        'cname': {'key': 'cname', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        status: Optional[Union[str, "SslConfigurationStatus"]] = None,
+        cert: Optional[str] = None,
+        key: Optional[str] = None,
+        cname: Optional[str] = None,
+        **kwargs
+    ):
+        super(SslConfiguration, self).__init__(**kwargs)
+        self.status = status
+        self.cert = cert
+        self.key = key
+        self.cname = cname
+
+
+class StatusMessage(msrest.serialization.Model):
+    """Active message associated with project.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar level: Severity level of message. Possible values include: "Error", "Information",
+     "Warning".
+    :vartype level: str or ~azure_machine_learning_workspaces.models.StatusMessageLevel
+    :ivar code: Service-defined message code.
+    :vartype code: str
+    :ivar message: A human-readable representation of the message code.
+    :vartype message: str
+    """
+
+    # Every field is read-only: populated from service responses, never serialized out.
+    _validation = {
+        'level': {'readonly': True},
+        'code': {'readonly': True},
+        'message': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'level': {'key': 'level', 'type': 'str'},
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(StatusMessage, self).__init__(**kwargs)
+        # Read-only attributes start as None; the msrest deserializer fills them in.
+        self.level = None
+        self.code = None
+        self.message = None
+
+
+class SweepJob(ComputeJobBase):
+    """SweepJob.
+
+    A hyperparameter-sweep job: the 'Sweep' variant of the polymorphic job hierarchy.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
+     "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar interaction_endpoints: Dictionary of endpoint URIs, keyed by enumerated job endpoints.
+     For local jobs, a job endpoint will have a value of FileStreamObject.
+    :vartype interaction_endpoints:
+     ~azure_machine_learning_workspaces.models.JobBaseInteractionEndpoints
+    :param description: The asset description text.
+    :type description: str
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+     placed in the "Default" experiment.
+    :type experiment_name: str
+    :param compute_binding: Required. Compute binding for the job.
+    :type compute_binding: ~azure_machine_learning_workspaces.models.ComputeBinding
+    :ivar output: Location of the job output logs and artifacts.
+    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+     Private preview is only for whitelisted customers.
+    :type priority: int
+    :ivar status: The status of a job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :param parameter_sampling_configuration: Required. Class for all hyperparameter sampling
+     algorithms.
+    :type parameter_sampling_configuration:
+     ~azure_machine_learning_workspaces.models.ParameterSamplingConfiguration
+    :param termination_configuration:
+    :type termination_configuration:
+     ~azure_machine_learning_workspaces.models.TerminationConfiguration
+    :param evaluation_configuration: Required.
+    :type evaluation_configuration:
+     ~azure_machine_learning_workspaces.models.EvaluationConfiguration
+    :param trial_component:
+    :type trial_component: ~azure_machine_learning_workspaces.models.TrialComponent
+    :param identity_configuration:
+    :type identity_configuration: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+    """
+
+    # 'required' entries must be supplied by the caller; 'readonly' ones are output-only.
+    _validation = {
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'compute_binding': {'required': True},
+        'output': {'readonly': True},
+        'status': {'readonly': True},
+        'parameter_sampling_configuration': {'required': True},
+        'evaluation_configuration': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': 'JobBaseInteractionEndpoints'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'compute_binding': {'key': 'computeBinding', 'type': 'ComputeBinding'},
+        'output': {'key': 'output', 'type': 'JobOutput'},
+        'priority': {'key': 'priority', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'parameter_sampling_configuration': {'key': 'parameterSamplingConfiguration', 'type': 'ParameterSamplingConfiguration'},
+        'termination_configuration': {'key': 'terminationConfiguration', 'type': 'TerminationConfiguration'},
+        'evaluation_configuration': {'key': 'evaluationConfiguration', 'type': 'EvaluationConfiguration'},
+        'trial_component': {'key': 'trialComponent', 'type': 'TrialComponent'},
+        'identity_configuration': {'key': 'identityConfiguration', 'type': 'IdentityConfiguration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_binding: "ComputeBinding",
+        parameter_sampling_configuration: "ParameterSamplingConfiguration",
+        evaluation_configuration: "EvaluationConfiguration",
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        experiment_name: Optional[str] = None,
+        priority: Optional[int] = None,
+        termination_configuration: Optional["TerminationConfiguration"] = None,
+        trial_component: Optional["TrialComponent"] = None,
+        identity_configuration: Optional["IdentityConfiguration"] = None,
+        **kwargs
+    ):
+        # Shared job fields are handled by ComputeJobBase; sweep-specific ones below.
+        super(SweepJob, self).__init__(description=description, tags=tags, properties=properties, experiment_name=experiment_name, compute_binding=compute_binding, priority=priority, **kwargs)
+        # Discriminator value is fixed for this subtype; callers never pass it.
+        self.job_type = 'Sweep'  # type: str
+        # Read-only; populated by the service on responses.
+        self.status = None
+        self.parameter_sampling_configuration = parameter_sampling_configuration
+        self.termination_configuration = termination_configuration
+        self.evaluation_configuration = evaluation_configuration
+        self.trial_component = trial_component
+        self.identity_configuration = identity_configuration
+
+
+class SystemData(msrest.serialization.Model):
+    """Metadata pertaining to creation and last modification of the resource.
+
+    :param created_by: The identity that created the resource.
+    :type created_by: str
+    :param created_by_type: The type of identity that created the resource. Possible values
+     include: "User", "Application", "ManagedIdentity", "Key".
+    :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+    :param created_at: The timestamp of resource creation (UTC).
+    :type created_at: ~datetime.datetime
+    :param last_modified_by: The identity that last modified the resource.
+    :type last_modified_by: str
+    :param last_modified_by_type: The type of identity that last modified the resource. Possible
+     values include: "User", "Application", "ManagedIdentity", "Key".
+    :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+    :param last_modified_at: The timestamp of resource last modification (UTC).
+    :type last_modified_at: ~datetime.datetime
+    """
+
+    # Wire-format map; 'iso-8601' (de)serializes datetimes as ISO 8601 strings.
+    _attribute_map = {
+        'created_by': {'key': 'createdBy', 'type': 'str'},
+        'created_by_type': {'key': 'createdByType', 'type': 'str'},
+        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        *,
+        created_by: Optional[str] = None,
+        created_by_type: Optional[Union[str, "CreatedByType"]] = None,
+        created_at: Optional[datetime.datetime] = None,
+        last_modified_by: Optional[str] = None,
+        last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None,
+        last_modified_at: Optional[datetime.datetime] = None,
+        **kwargs
+    ):
+        super(SystemData, self).__init__(**kwargs)
+        self.created_by = created_by
+        self.created_by_type = created_by_type
+        self.created_at = created_at
+        self.last_modified_by = last_modified_by
+        self.last_modified_by_type = last_modified_by_type
+        self.last_modified_at = last_modified_at
+
+
+class SystemService(msrest.serialization.Model):
+    """A system service running on a compute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar system_service_type: The type of this system service.
+    :vartype system_service_type: str
+    :ivar public_ip_address: Public IP address.
+    :vartype public_ip_address: str
+    :ivar version: The version for this type.
+    :vartype version: str
+    """
+
+    # Every field is read-only: populated from service responses, never serialized out.
+    _validation = {
+        'system_service_type': {'readonly': True},
+        'public_ip_address': {'readonly': True},
+        'version': {'readonly': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(SystemService, self).__init__(**kwargs)
+        # Read-only attributes start as None; the msrest deserializer fills them in.
+        self.system_service_type = None
+        self.public_ip_address = None
+        self.version = None
+
+
+class TensorFlow(DistributionConfiguration):
+    """TensorFlow.
+
+    TensorFlow variant of the polymorphic distribution configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework.Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param worker_count: Number of workers. Overwrites the node count in compute binding.
+    :type worker_count: int
+    :param parameter_server_count:
+    :type parameter_server_count: int
+    """
+
+    # distribution_type is the polymorphic discriminator and is always present.
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'worker_count': {'key': 'workerCount', 'type': 'int'},
+        'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        worker_count: Optional[int] = None,
+        parameter_server_count: Optional[int] = None,
+        **kwargs
+    ):
+        super(TensorFlow, self).__init__(**kwargs)
+        # Discriminator value is fixed for this subtype; callers never pass it.
+        self.distribution_type = 'TensorFlow'  # type: str
+        self.worker_count = worker_count
+        self.parameter_server_count = parameter_server_count
+
+
+class TerminationConfiguration(msrest.serialization.Model):
+    """TerminationConfiguration.
+
+    Limits that stop a sweep: run-count caps, a wall-clock cap, and an optional
+    early-termination policy.
+
+    :param max_total_runs:
+    :type max_total_runs: int
+    :param max_concurrent_runs:
+    :type max_concurrent_runs: int
+    :param max_duration_minutes:
+    :type max_duration_minutes: int
+    :param early_termination_policy_configuration: Early termination policies enable canceling
+     poor-performing runs before they complete.
+    :type early_termination_policy_configuration:
+     ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyConfiguration
+    """
+
+    # Wire-format map: Python attribute -> JSON key and msrest (de)serialization type.
+    _attribute_map = {
+        'max_total_runs': {'key': 'maxTotalRuns', 'type': 'int'},
+        'max_concurrent_runs': {'key': 'maxConcurrentRuns', 'type': 'int'},
+        'max_duration_minutes': {'key': 'maxDurationMinutes', 'type': 'int'},
+        'early_termination_policy_configuration': {'key': 'earlyTerminationPolicyConfiguration', 'type': 'EarlyTerminationPolicyConfiguration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        max_total_runs: Optional[int] = None,
+        max_concurrent_runs: Optional[int] = None,
+        max_duration_minutes: Optional[int] = None,
+        early_termination_policy_configuration: Optional["EarlyTerminationPolicyConfiguration"] = None,
+        **kwargs
+    ):
+        super(TerminationConfiguration, self).__init__(**kwargs)
+        self.max_total_runs = max_total_runs
+        self.max_concurrent_runs = max_concurrent_runs
+        self.max_duration_minutes = max_duration_minutes
+        self.early_termination_policy_configuration = early_termination_policy_configuration
+
+
+class TrainingDataSettings(msrest.serialization.Model):
+    """Dataset datamodel.
+
+    This class represents the Dataset JSON structure that is passed into Jasmine.
+
+    :param dataset_arm_id: The Dataset Arm Id.
+    :type dataset_arm_id: str
+    :param target_column_name: Label column name.
+    :type target_column_name: str
+    :param weight_column_name: Weight column name.
+    :type weight_column_name: str
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'dataset_arm_id': {'key': 'datasetArmId', 'type': 'str'},
+        'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
+        'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        dataset_arm_id: Optional[str] = None,
+        target_column_name: Optional[str] = None,
+        weight_column_name: Optional[str] = None,
+        **kwargs
+    ):
+        super(TrainingDataSettings, self).__init__(**kwargs)
+        self.dataset_arm_id = dataset_arm_id
+        self.target_column_name = target_column_name
+        self.weight_column_name = weight_column_name
+
+
+class TrainingSettings(msrest.serialization.Model):
+    """Training related configuration.
+
+    :param trial_timeout_in_minutes: Iteration Timeout.
+    :type trial_timeout_in_minutes: int
+    :param block_list_models: List of Algorithms/Models to be blocked for training.
+    :type block_list_models: list[str]
+    :param allow_list_models: List of Algorithms/Models to be Allowed for training.
+    :type allow_list_models: list[str]
+    :param experiment_exit_score: Exit score for the AutoML experiment.
+    :type experiment_exit_score: float
+    :param enable_early_termination: Enable early termination.
+    :type enable_early_termination: bool
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'trial_timeout_in_minutes': {'key': 'trialTimeoutInMinutes', 'type': 'int'},
+        'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
+        'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
+        'experiment_exit_score': {'key': 'experimentExitScore', 'type': 'float'},
+        'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        trial_timeout_in_minutes: Optional[int] = None,
+        block_list_models: Optional[List[str]] = None,
+        allow_list_models: Optional[List[str]] = None,
+        experiment_exit_score: Optional[float] = None,
+        enable_early_termination: Optional[bool] = None,
+        **kwargs
+    ):
+        super(TrainingSettings, self).__init__(**kwargs)
+        self.trial_timeout_in_minutes = trial_timeout_in_minutes
+        self.block_list_models = block_list_models
+        self.allow_list_models = allow_list_models
+        self.experiment_exit_score = experiment_exit_score
+        self.enable_early_termination = enable_early_termination
+
+
+class TrialComponent(msrest.serialization.Model):
+    """TrialComponent.
+
+    Definition of a single trial: code, environment, data bindings and the
+    optional distributed-training configuration.
+
+    :param code_configuration: Code configuration of the job.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param environment_id: Environment id of the job.
+    :type environment_id: str
+    :param data_bindings: Mapping of data bindings used in the job.
+    :type data_bindings: dict[str, ~azure_machine_learning_workspaces.models.DataBinding]
+    :param environment_variables: Environment variables included in the job.
+    :type environment_variables: dict[str, str]
+    :param distribution_configuration: Distributed-training framework settings.
+    :type distribution_configuration:
+     ~azure_machine_learning_workspaces.models.DistributionConfiguration
+    """
+
+    # Python attribute -> REST wire name / msrest type; '{X}' denotes a dict of X.
+    _attribute_map = {
+        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'data_bindings': {'key': 'dataBindings', 'type': '{DataBinding}'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'distribution_configuration': {'key': 'distributionConfiguration', 'type': 'DistributionConfiguration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code_configuration: Optional["CodeConfiguration"] = None,
+        environment_id: Optional[str] = None,
+        data_bindings: Optional[Dict[str, "DataBinding"]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        distribution_configuration: Optional["DistributionConfiguration"] = None,
+        **kwargs
+    ):
+        super(TrialComponent, self).__init__(**kwargs)
+        self.code_configuration = code_configuration
+        self.environment_id = environment_id
+        self.data_bindings = data_bindings
+        self.environment_variables = environment_variables
+        self.distribution_configuration = distribution_configuration
+
+
+class TruncationSelectionPolicyConfiguration(EarlyTerminationPolicyConfiguration):
+    """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+     values include: "Bandit", "MedianStopping", "TruncationSelection".
+    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+    :param evaluation_interval: Interval (in evaluations) between policy applications.
+    :type evaluation_interval: int
+    :param delay_evaluation: Number of evaluations to delay the first policy application.
+    :type delay_evaluation: int
+    :param truncation_percentage: Percentage of runs to cancel at each evaluation interval.
+    :type truncation_percentage: int
+    :param exclude_finished_jobs: Whether to exclude finished jobs when applying the policy.
+    :type exclude_finished_jobs: bool
+    """
+
+    # 'required' constraints enforced by the msrest serializer; policy_type is
+    # the polymorphic discriminator and is fixed in __init__.
+    _validation = {
+        'policy_type': {'required': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'policy_type': {'key': 'policyType', 'type': 'str'},
+        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+        'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
+        'exclude_finished_jobs': {'key': 'excludeFinishedJobs', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        evaluation_interval: Optional[int] = None,
+        delay_evaluation: Optional[int] = None,
+        truncation_percentage: Optional[int] = None,
+        exclude_finished_jobs: Optional[bool] = None,
+        **kwargs
+    ):
+        # Base class owns the common early-termination knobs.
+        super(TruncationSelectionPolicyConfiguration, self).__init__(evaluation_interval=evaluation_interval, delay_evaluation=delay_evaluation, **kwargs)
+        self.policy_type = 'TruncationSelection'  # type: str
+        self.truncation_percentage = truncation_percentage
+        self.exclude_finished_jobs = exclude_finished_jobs
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+    """The properties for update Quota response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar type: Specifies the resource type.
+    :vartype type: str
+    :param limit: The maximum permitted quota of the resource.
+    :type limit: long
+    :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+    :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    :param status: Status of update workspace quota. Possible values include: "Undefined",
+     "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+     "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+     "OperationNotEnabledForRegion".
+    :type status: str or ~azure_machine_learning_workspaces.models.Status
+    """
+
+    # 'readonly' fields are server-populated and ignored on outgoing requests.
+    _validation = {
+        'id': {'readonly': True},
+        'type': {'readonly': True},
+        'unit': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        limit: Optional[int] = None,
+        status: Optional[Union[str, "Status"]] = None,
+        **kwargs
+    ):
+        super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.id = None
+        self.type = None
+        self.limit = limit
+        self.unit = None
+        self.status = status
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+    """The result of update workspace quota.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of workspace quota update result.
+    :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+    :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+     ListNext() with this to fetch the next page of Workspace Quota update result.
+    :vartype next_link: str
+    """
+
+    # Both fields are server-populated (paged response envelope).
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.value = None
+        self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+    """Describes AML Resource Usage.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar type: Specifies the resource type.
+    :vartype type: str
+    :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+    :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+    :ivar current_value: The current usage of the resource.
+    :vartype current_value: long
+    :ivar limit: The maximum permitted usage of the resource.
+    :vartype limit: long
+    :ivar name: The name of the type of usage.
+    :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+    """
+
+    # Every field is server-populated; this model is response-only in practice.
+    _validation = {
+        'id': {'readonly': True},
+        'type': {'readonly': True},
+        'unit': {'readonly': True},
+        'current_value': {'readonly': True},
+        'limit': {'readonly': True},
+        'name': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'unit': {'key': 'unit', 'type': 'str'},
+        'current_value': {'key': 'currentValue', 'type': 'long'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'name': {'key': 'name', 'type': 'UsageName'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Usage, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.id = None
+        self.type = None
+        self.unit = None
+        self.current_value = None
+        self.limit = None
+        self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+    """The Usage Names.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    # Both fields are server-populated.
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UsageName, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.value = None
+        self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+    """Settings for user account that gets created on each on the nodes of a compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param admin_user_name: Required. Name of the administrator user account which can be used to
+     SSH to nodes.
+    :type admin_user_name: str
+    :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+    :type admin_user_ssh_public_key: str
+    :param admin_user_password: Password of the administrator user account.
+    :type admin_user_password: str
+    """
+
+    # 'required' constraints enforced by the msrest serializer.
+    _validation = {
+        'admin_user_name': {'required': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+        'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+        'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        admin_user_name: str,
+        admin_user_ssh_public_key: Optional[str] = None,
+        admin_user_password: Optional[str] = None,
+        **kwargs
+    ):
+        super(UserAccountCredentials, self).__init__(**kwargs)
+        self.admin_user_name = admin_user_name
+        self.admin_user_ssh_public_key = admin_user_ssh_public_key
+        self.admin_user_password = admin_user_password
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+    """User Assigned Identity.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar principal_id: The principal ID of the user assigned identity.
+    :vartype principal_id: str
+    :ivar tenant_id: The tenant ID of the user assigned identity.
+    :vartype tenant_id: str
+    :ivar client_id: The clientId(aka appId) of the user assigned identity.
+    :vartype client_id: str
+    """
+
+    # Every field is server-populated.
+    _validation = {
+        'principal_id': {'readonly': True},
+        'tenant_id': {'readonly': True},
+        'client_id': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UserAssignedIdentity, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.principal_id = None
+        self.tenant_id = None
+        self.client_id = None
+
+
+class UserAssignedIdentityMeta(msrest.serialization.Model):
+    """User assigned identities associated with a resource.
+
+    :param principal_id: the object ID of the service principal object for your managed identity
+     that is used to grant role-based access to an Azure resource.
+    :type principal_id: str
+    :param client_id: aka appId, a unique identifier generated by Azure AD that is tied to an
+     application and service principal during its initial provisioning.
+    :type client_id: str
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        principal_id: Optional[str] = None,
+        client_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(UserAssignedIdentityMeta, self).__init__(**kwargs)
+        self.principal_id = principal_id
+        self.client_id = client_id
+
+
+class ValidationDataSettings(msrest.serialization.Model):
+    """ValidationDataSettings.
+
+    Settings describing the validation data (or cross-validation fallback) for a job.
+
+    :param dataset_arm_id: Dataset Arm id.
+    :type dataset_arm_id: str
+    :param n_cross_validations: Number of cross validation folds to be applied on training dataset
+     when validation dataset is not provided.
+    :type n_cross_validations: int
+    :param validation_size: The fraction of training dataset that needs to be set aside for
+     validation purpose.
+     Values between (0.0 , 1.0)
+     Applied when validation dataset is not provided.
+    :type validation_size: float
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'dataset_arm_id': {'key': 'datasetArmId', 'type': 'str'},
+        'n_cross_validations': {'key': 'nCrossValidations', 'type': 'int'},
+        'validation_size': {'key': 'validationSize', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        dataset_arm_id: Optional[str] = None,
+        n_cross_validations: Optional[int] = None,
+        validation_size: Optional[float] = None,
+        **kwargs
+    ):
+        super(ValidationDataSettings, self).__init__(**kwargs)
+        self.dataset_arm_id = dataset_arm_id
+        self.n_cross_validations = n_cross_validations
+        self.validation_size = validation_size
+
+
+class VirtualMachine(Compute):
+    """A Machine Learning compute based on Azure Virtual Machines.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param properties: VM-specific settings (size, SSH port, address, admin account).
+    :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+    """
+
+    # 'required' is enforced on send; 'readonly' fields are server-populated.
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        compute_location: Optional[str] = None,
+        description: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        properties: Optional["VirtualMachineProperties"] = None,
+        **kwargs
+    ):
+        # Common Compute fields are handled by the base class.
+        super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+        # Polymorphic discriminator: fixed by this subclass, never caller-supplied.
+        self.compute_type = 'VirtualMachine'  # type: str
+        self.properties = properties
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+    """Virtual Machine image for Windows AML Compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Required. Virtual Machine image path.
+    :type id: str
+    """
+
+    # 'required' constraints enforced by the msrest serializer.
+    _validation = {
+        'id': {'required': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        id: str,
+        **kwargs
+    ):
+        super(VirtualMachineImage, self).__init__(**kwargs)
+        # NOTE: parameter name 'id' shadows the builtin; kept for wire compatibility.
+        self.id = id
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+    """VirtualMachineProperties.
+
+    Connection and sizing details for a user-provided virtual machine compute.
+
+    :param virtual_machine_size: Virtual Machine size.
+    :type virtual_machine_size: str
+    :param ssh_port: Port open for ssh connections.
+    :type ssh_port: int
+    :param address: Public IP address of the virtual machine.
+    :type address: str
+    :param administrator_account: Admin credentials for virtual machine.
+    :type administrator_account:
+     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+        'ssh_port': {'key': 'sshPort', 'type': 'int'},
+        'address': {'key': 'address', 'type': 'str'},
+        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+    }
+
+    def __init__(
+        self,
+        *,
+        virtual_machine_size: Optional[str] = None,
+        ssh_port: Optional[int] = None,
+        address: Optional[str] = None,
+        administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+        **kwargs
+    ):
+        super(VirtualMachineProperties, self).__init__(**kwargs)
+        self.virtual_machine_size = virtual_machine_size
+        self.ssh_port = ssh_port
+        self.address = address
+        self.administrator_account = administrator_account
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on an Azure Virtual Machine.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param administrator_account: Admin credentials for virtual machine.
+    :type administrator_account:
+     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+    """
+
+    # 'required' constraints enforced by the msrest serializer; compute_type is
+    # the polymorphic discriminator and is fixed in __init__.
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+    }
+
+    def __init__(
+        self,
+        *,
+        administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+        **kwargs
+    ):
+        super(VirtualMachineSecrets, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed by this subclass, never caller-supplied.
+        self.compute_type = 'VirtualMachine'  # type: str
+        self.administrator_account = administrator_account
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+    """Describes the properties of a VM size.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The name of the virtual machine size.
+    :vartype name: str
+    :ivar family: The family name of the virtual machine size.
+    :vartype family: str
+    :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+    :vartype v_cp_us: int
+    :ivar gpus: The number of gPUs supported by the virtual machine size.
+    :vartype gpus: int
+    :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+    :vartype os_vhd_size_mb: int
+    :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+     size.
+    :vartype max_resource_volume_mb: int
+    :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+    :vartype memory_gb: float
+    :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+    :vartype low_priority_capable: bool
+    :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+    :vartype premium_io: bool
+    :param estimated_vm_prices: The estimated price information for using a VM.
+    :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+    """
+
+    # All fields except estimated_vm_prices are server-populated.
+    _validation = {
+        'name': {'readonly': True},
+        'family': {'readonly': True},
+        'v_cp_us': {'readonly': True},
+        'gpus': {'readonly': True},
+        'os_vhd_size_mb': {'readonly': True},
+        'max_resource_volume_mb': {'readonly': True},
+        'memory_gb': {'readonly': True},
+        'low_priority_capable': {'readonly': True},
+        'premium_io': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'family': {'key': 'family', 'type': 'str'},
+        'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+        'gpus': {'key': 'gpus', 'type': 'int'},
+        'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+        'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+        'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+        'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+        'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+        'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+    }
+
+    def __init__(
+        self,
+        *,
+        estimated_vm_prices: Optional["EstimatedVmPrices"] = None,
+        **kwargs
+    ):
+        super(VirtualMachineSize, self).__init__(**kwargs)
+        # Read-only fields start as None; the server fills them on responses.
+        self.name = None
+        self.family = None
+        self.v_cp_us = None
+        self.gpus = None
+        self.os_vhd_size_mb = None
+        self.max_resource_volume_mb = None
+        self.memory_gb = None
+        self.low_priority_capable = None
+        self.premium_io = None
+        self.estimated_vm_prices = estimated_vm_prices
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+    """The List Virtual Machine size operation response.
+
+    :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+    :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        aml_compute: Optional[List["VirtualMachineSize"]] = None,
+        **kwargs
+    ):
+        super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+        self.aml_compute = aml_compute
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+    """Admin credentials for virtual machine.
+
+    :param username: Username of admin account.
+    :type username: str
+    :param password: Password of admin account.
+    :type password: str
+    :param public_key_data: Public key data.
+    :type public_key_data: str
+    :param private_key_data: Private key data.
+    :type private_key_data: str
+    """
+
+    # Python attribute -> REST wire name / msrest type, used by the serializer.
+    _attribute_map = {
+        'username': {'key': 'username', 'type': 'str'},
+        'password': {'key': 'password', 'type': 'str'},
+        'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+        'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        username: Optional[str] = None,
+        password: Optional[str] = None,
+        public_key_data: Optional[str] = None,
+        private_key_data: Optional[str] = None,
+        **kwargs
+    ):
+        super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+        self.username = username
+        self.password = password
+        self.public_key_data = public_key_data
+        self.private_key_data = private_key_data
+
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name in mutable.
+ :type friendly_name: str
+ :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format.
+ :vartype creation_time: ~datetime.datetime
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ workspace RP in customer subscription if the workspace is CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'creation_time': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ }
+
    def __init__(
        self,
        *,
        identity: Optional["Identity"] = None,
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        sku: Optional["Sku"] = None,
        description: Optional[str] = None,
        friendly_name: Optional[str] = None,
        key_vault: Optional[str] = None,
        application_insights: Optional[str] = None,
        container_registry: Optional[str] = None,
        storage_account: Optional[str] = None,
        discovery_url: Optional[str] = None,
        encryption: Optional["EncryptionProperty"] = None,
        hbi_workspace: Optional[bool] = False,
        image_build_compute: Optional[str] = None,
        allow_public_access_when_behind_vnet: Optional[bool] = False,
        shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None,
        **kwargs
    ):
        """Initialize a Workspace model.

        All arguments are keyword-only and map to the writable workspace
        properties; see the class docstring for the meaning of each one.
        """
        # Resource-level fields (identity/location/tags/sku) are handled by the base class.
        super(Workspace, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
        # Fields assigned None below are server-populated (read-only per
        # _validation) and are ignored when serializing a request.
        self.workspace_id = None
        self.description = description
        self.friendly_name = friendly_name
        self.creation_time = None
        self.key_vault = key_vault
        self.application_insights = application_insights
        self.container_registry = container_registry
        self.storage_account = storage_account
        self.discovery_url = discovery_url
        self.provisioning_state = None
        self.encryption = encryption
        self.hbi_workspace = hbi_workspace
        self.service_provisioned_resource_group = None
        self.private_link_count = None
        self.image_build_compute = image_build_compute
        self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet
        self.private_endpoint_connections = None
        self.shared_private_link_resources = shared_private_link_resources
+
+
class WorkspaceConnection(msrest.serialization.Model):
    """A connection resource attached to a machine learning workspace.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: ResourceId of the workspace connection.
    :vartype id: str
    :ivar name: Friendly name of the workspace connection.
    :vartype name: str
    :ivar type: Resource type of workspace connection.
    :vartype type: str
    :param category: Category of the workspace connection.
    :type category: str
    :param target: Target of the workspace connection.
    :type target: str
    :param auth_type: Authorization type of the workspace connection.
    :type auth_type: str
    :param value: Value details of the workspace connection.
    :type value: str
    """

    # id/name/type are assigned by the service and therefore read-only.
    _validation = {
        key: {'readonly': True}
        for key in ('id', 'name', 'type')
    }

    # Attribute name -> wire key; connection settings live under "properties".
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'category': {'key': 'properties.category', 'type': 'str'},
        'target': {'key': 'properties.target', 'type': 'str'},
        'auth_type': {'key': 'properties.authType', 'type': 'str'},
        'value': {'key': 'properties.value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        category: Optional[str] = None,
        target: Optional[str] = None,
        auth_type: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """Initialize a WorkspaceConnection; see the class docstring for parameters."""
        super().__init__(**kwargs)
        # Read-only fields start unset; the service fills them in on responses.
        self.id = None
        self.name = None
        self.type = None
        self.category = category
        self.target = target
        self.auth_type = auth_type
        self.value = value
+
+
class WorkspaceConnectionDto(msrest.serialization.Model):
    """Payload object used when creating a workspace connection.

    :param name: Friendly name of the workspace connection.
    :type name: str
    :param category: Category of the workspace connection.
    :type category: str
    :param target: Target of the workspace connection.
    :type target: str
    :param auth_type: Authorization type of the workspace connection.
    :type auth_type: str
    :param value: Value details of the workspace connection.
    :type value: str
    """

    # Attribute name -> wire key; all fields except "name" are nested under
    # the "properties" envelope on the wire.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'category': {'key': 'properties.category', 'type': 'str'},
        'target': {'key': 'properties.target', 'type': 'str'},
        'auth_type': {'key': 'properties.authType', 'type': 'str'},
        'value': {'key': 'properties.value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        category: Optional[str] = None,
        target: Optional[str] = None,
        auth_type: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """Initialize a WorkspaceConnectionDto; see the class docstring for parameters."""
        super().__init__(**kwargs)
        self.name = name
        self.category = category
        self.target = target
        self.auth_type = auth_type
        self.value = value
+
+
class WorkspaceListResult(msrest.serialization.Model):
    """One page of results from a request to list machine learning workspaces.

    :param value: The list of machine learning workspaces. Since this list may be incomplete, the
     nextLink field should be used to request the next list of machine learning workspaces.
    :type value: list[~azure_machine_learning_workspaces.models.Workspace]
    :param next_link: The URI that can be used to request the next list of machine learning
     workspaces.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Workspace]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Workspace"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """Initialize a WorkspaceListResult; see the class docstring for parameters."""
        super().__init__(**kwargs)
        # An absent next_link means this page is the last one.
        self.value = value
        self.next_link = next_link
+
+
class WorkspaceSku(msrest.serialization.Model):
    """Describes Workspace Sku details and features.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar locations: The set of locations that the SKU is available. This will be supported and
     registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
    :vartype locations: list[str]
    :ivar location_info: A list of locations and availability zones in those locations where the
     SKU is available.
    :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
    :ivar tier: Sku Tier like Basic or Enterprise.
    :vartype tier: str
    :ivar resource_type:
    :vartype resource_type: str
    :ivar name:
    :vartype name: str
    :ivar capabilities: List of features/user capabilities associated with the sku.
    :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
    :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
     there are no restrictions.
    :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
    """

    # Every field except "restrictions" is reported by the service and is
    # read-only on the client.
    _SERVER_POPULATED = ('locations', 'location_info', 'tier', 'resource_type', 'name', 'capabilities')

    _validation = {
        key: {'readonly': True}
        for key in _SERVER_POPULATED
    }

    _attribute_map = {
        'locations': {'key': 'locations', 'type': '[str]'},
        'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
        'tier': {'key': 'tier', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
        'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
    }

    def __init__(
        self,
        *,
        restrictions: Optional[List["Restriction"]] = None,
        **kwargs
    ):
        """Initialize a WorkspaceSku; see the class docstring for parameters."""
        super().__init__(**kwargs)
        # Server-populated fields start out unset.
        for attr in self._SERVER_POPULATED:
            setattr(self, attr, None)
        self.restrictions = restrictions
+
+
class WorkspaceUpdateParameters(msrest.serialization.Model):
    """Patch payload for updating a machine learning workspace.

    :param tags: A set of tags. The resource tags for the machine learning workspace.
    :type tags: dict[str, str]
    :param sku: The sku of the workspace.
    :type sku: ~azure_machine_learning_workspaces.models.Sku
    :param description: The description of this workspace.
    :type description: str
    :param friendly_name: The friendly name for this workspace.
    :type friendly_name: str
    """

    # Attribute name -> wire key; description/friendlyName are nested under
    # the "properties" envelope on the wire.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tags: Optional[Dict[str, str]] = None,
        sku: Optional["Sku"] = None,
        description: Optional[str] = None,
        friendly_name: Optional[str] = None,
        **kwargs
    ):
        """Initialize WorkspaceUpdateParameters; see the class docstring for parameters."""
        super().__init__(**kwargs)
        self.tags = tags
        self.sku = sku
        self.description = description
        self.friendly_name = friendly_name
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
new file mode 100644
index 00000000000..9cd96ead8ac
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._linked_services_operations import LinkedServicesOperations
+from ._machine_learning_service_operations import MachineLearningServiceOperations
+from ._notebooks_operations import NotebooksOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._code_containers_operations import CodeContainersOperations
+from ._code_versions_operations import CodeVersionsOperations
+from ._component_containers_operations import ComponentContainersOperations
+from ._component_versions_operations import ComponentVersionsOperations
+from ._data_containers_operations import DataContainersOperations
+from ._datastores_operations import DatastoresOperations
+from ._data_versions_operations import DataVersionsOperations
+from ._environment_containers_operations import EnvironmentContainersOperations
+from ._environment_specification_versions_operations import EnvironmentSpecificationVersionsOperations
+from ._jobs_operations import JobsOperations
+from ._labeling_jobs_operations import LabelingJobsOperations
+from ._model_containers_operations import ModelContainersOperations
+from ._model_versions_operations import ModelVersionsOperations
+from ._online_deployments_operations import OnlineDeploymentsOperations
+from ._online_endpoints_operations import OnlineEndpointsOperations
+
# Public surface of this operations package: one entry per operation group,
# plus the client mixin for subscription-level operations.
__all__ = [
    'Operations',
    'WorkspacesOperations',
    'WorkspaceFeaturesOperations',
    'UsagesOperations',
    'VirtualMachineSizesOperations',
    'QuotasOperations',
    'MachineLearningComputeOperations',
    'AzureMachineLearningWorkspacesOperationsMixin',
    'PrivateEndpointConnectionsOperations',
    'PrivateLinkResourcesOperations',
    'LinkedServicesOperations',
    'MachineLearningServiceOperations',
    'NotebooksOperations',
    'WorkspaceConnectionsOperations',
    'CodeContainersOperations',
    'CodeVersionsOperations',
    'ComponentContainersOperations',
    'ComponentVersionsOperations',
    'DataContainersOperations',
    'DatastoresOperations',
    'DataVersionsOperations',
    'EnvironmentContainersOperations',
    'EnvironmentSpecificationVersionsOperations',
    'JobsOperations',
    'LabelingJobsOperations',
    'ModelContainersOperations',
    'ModelVersionsOperations',
    'OnlineDeploymentsOperations',
    'OnlineEndpointsOperations',
]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py
new file mode 100644
index 00000000000..f4bbe9232f0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py
@@ -0,0 +1,94 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class AzureMachineLearningWorkspacesOperationsMixin(object):
    # Mixin methods are attached to the generated service client and rely on
    # its _client, _serialize, _deserialize and _config attributes.

    def list_skus(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.SkuListResult"]
        """Lists all skus with associated features.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SkuListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for either the first page (templated URL)
            # or a continuation page (opaque next_link from the service).
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_skus.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds every query parameter, so none are added.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Yield (continuation token, page iterator); a falsy next_link ends paging.
            deserialized = self._deserialize('SkuListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Deserialize the error body first so the mapped/raised exception
                # can carry the service-provided error model.
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py
new file mode 100644
index 00000000000..2e248781b44
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class CodeContainersOperations(object):
    """CodeContainersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client and (de)serializers are supplied by the generated
        # service client that owns this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def create_or_update(
        self,
        name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        body, # type: "models.CodeContainerResource"
        **kwargs # type: Any
    ):
        # type: (...) -> "models.CodeContainerResource"
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url'] # type: ignore
        path_format_arguments = {
            # NOTE(review): the name pattern is validated client-side only here,
            # not in get/delete below — confirm against the service spec.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CodeContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both return the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} # type: ignore

    def get(
        self,
        name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "models.CodeContainerResource"
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} # type: ignore

    def delete(
        self,
        name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url'] # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the container was already absent; both outcomes succeed.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} # type: ignore

    def list(
        self,
        resource_group_name, # type: str
        workspace_name, # type: str
        skiptoken=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.CodeContainerResourceArmPaginatedResult"]
        """List containers.

        List containers.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.CodeContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.CodeContainerResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for either the first page (templated URL)
            # or a continuation page (opaque next_link from the service).
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds every query parameter, so none are added.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Yield (continuation token, page iterator); a falsy next_link ends paging.
            deserialized = self._deserialize('CodeContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Deserialize the error body first so the mapped/raised exception
                # can carry the service-provided error model.
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py
new file mode 100644
index 00000000000..d70b3f84167
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class CodeVersionsOperations(object):
+ """CodeVersionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def create_or_update(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ body, # type: "models.CodeVersionResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.CodeVersionResource"
+ """Create or update version.
+
+ Create or update version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Version entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.CodeVersionResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CodeVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.CodeVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'CodeVersionResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} # type: ignore
+
+ def get(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.CodeVersionResource"
+ """Get version.
+
+ Get version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CodeVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.CodeVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('CodeVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} # type: ignore
+
+ def delete(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete version.
+
+ Delete version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} # type: ignore
+
+ def list(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ order_by=None, # type: Optional[str]
+ top=None, # type: Optional[int]
+ skiptoken=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.CodeVersionResourceArmPaginatedResult"]
+ """List versions.
+
+ List versions.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.CodeVersionResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.CodeVersionResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('CodeVersionResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_containers_operations.py
new file mode 100644
index 00000000000..10e587d218c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class ComponentContainersOperations(object):
    """ComponentContainersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def create_or_update(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.ComponentContainerResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ComponentContainerResource"
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.ComponentContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComponentContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComponentContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResource"]
        # Map well-known status codes to typed azure-core exceptions; callers may
        # extend or override this mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Container name is validated client-side against the service naming pattern.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'ComponentContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('ComponentContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ComponentContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ComponentContainerResource"
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComponentContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComponentContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ComponentContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 and 204 both indicate a successful delete; there is no body to deserialize.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}'}  # type: ignore

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        skiptoken=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.ComponentContainerResourceArmPaginatedResult"]
        """List containers.

        List containers.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ComponentContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentContainerResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation metadata and add
                # the optional $skiptoken query parameter.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: next_link is a complete URL returned by the
                # service, so no query parameters are re-added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (continuation token, iterator of items) as
            # required by the ItemPaged protocol.
            deserialized = self._deserialize('ComponentContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): the error body is deserialized before map_error, so a
                # mapped typed error (401/404/409) is raised without the model attached;
                # this ordering matches the code generator's paging output.
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_versions_operations.py
new file mode 100644
index 00000000000..6b72634c11f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_component_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class ComponentVersionsOperations(object):
    """ComponentVersionsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE(review): auto-generated by AutoRest — manual edits will be lost on regeneration.
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client, shared config (carries subscription_id), and the
        # msrest serializer/deserializer pair injected by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def create_or_update(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.ComponentVersionResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ComponentVersionResource"
        """Create or update version.

        Create or update version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Version entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.ComponentVersionResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComponentVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComponentVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Container names are validated client-side against the service's naming rules.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'ComponentVersionResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            # Mapped codes (401/404/409 plus caller overrides) raise first; anything
            # else is surfaced as HttpResponseError with the deserialized error model.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the same resource payload.
        if response.status_code == 200:
            deserialized = self._deserialize('ComponentVersionResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ComponentVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ComponentVersionResource"
        """Get version.

        Get version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComponentVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComponentVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ComponentVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete version.

        Delete version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the version was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}'}  # type: ignore

    def list(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        order_by=None,  # type: Optional[str]
        top=None,  # type: Optional[int]
        skiptoken=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.ComponentVersionResourceArmPaginatedResult"]
        """List versions.

        List versions.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param order_by: Ordering of list.
        :type order_by: str
        :param top: Maximum number of records to return.
        :type top: int
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ComponentVersionResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComponentVersionResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL and the optional OData-style query parameters.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'name': self._serialize.url("name", name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if order_by is not None:
                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: next_link already embeds all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (continuation token, items iterator) for ItemPaged.
            deserialized = self._deserialize('ComponentVersionResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py
new file mode 100644
index 00000000000..55939e1535a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class DataContainersOperations(object):
    """DataContainersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE(review): auto-generated by AutoRest — manual edits will be lost on regeneration.
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client, shared config (carries subscription_id), and the
        # msrest serializer/deserializer pair injected by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def create_or_update(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.DataContainerResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.DataContainerResource"
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.DataContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Container names are validated client-side against the service's naming rules.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'DataContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            # Mapped codes (401/404/409 plus caller overrides) raise first; anything
            # else is surfaced as HttpResponseError with the deserialized error model.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the same resource payload.
        if response.status_code == 200:
            deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.DataContainerResource"
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the container was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        skiptoken=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.DataContainerResourceArmPaginatedResult"]
        """List containers.

        List containers.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DataContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL and the optional continuation parameter.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: next_link already embeds all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (continuation token, items iterator) for ItemPaged.
            deserialized = self._deserialize('DataContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py
new file mode 100644
index 00000000000..0c2fe2f6bfa
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DataVersionsOperations(object):
+ """DataVersionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def create_or_update(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ body, # type: "models.DataVersionResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.DataVersionResource"
+ """Create or update version.
+
+ Create or update version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Version entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.DataVersionResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DataVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DataVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'DataVersionResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} # type: ignore
+
+ def get(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.DataVersionResource"
+ """Get version.
+
+ Get version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DataVersionResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DataVersionResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} # type: ignore
+
+ def delete(
+ self,
+ name, # type: str
+ version, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete version.
+
+ Delete version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} # type: ignore
+
+ def list(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ order_by=None, # type: Optional[str]
+ top=None, # type: Optional[int]
+ skiptoken=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.DataVersionResourceArmPaginatedResult"]
+ """List versions.
+
+ List versions.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either DataVersionResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DataVersionResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DataVersionResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct HTTP header parameters (shared by first and follow-up pages)
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct query string parameters; optional filters are sent only when set
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('DataVersionResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py
new file mode 100644
index 00000000000..5de8e8b212b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py
@@ -0,0 +1,432 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DatastoresOperations(object):
+ """DatastoresOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ skiptoken=None, # type: Optional[str]
+ count=30, # type: Optional[int]
+ is_default=None, # type: Optional[bool]
+ names=None, # type: Optional[List[str]]
+ search_text=None, # type: Optional[str]
+ order_by=None, # type: Optional[str]
+ order_by_asc=False, # type: Optional[bool]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.DatastorePropertiesResourceArmPaginatedResult"]
+ """List datastores.
+
+ List datastores.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :param count: Maximum number of results to return.
+ :type count: int
+ :param is_default: Filter down to the workspace default datastore.
+ :type is_default: bool
+ :param names: Names of datastores to return.
+ :type names: list[str]
+ :param search_text: Text to search for in the datastore names.
+ :type search_text: str
+ :param order_by: Order by property (createdtime | modifiedtime | name).
+ :type order_by: str
+ :param order_by_asc: Order by property in ascending order.
+ :type order_by_asc: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either DatastorePropertiesResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DatastorePropertiesResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct HTTP header parameters (shared by first and follow-up pages)
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct query string parameters; optional filters are sent only when set
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+ if count is not None:
+ query_parameters['count'] = self._serialize.query("count", count, 'int')
+ if is_default is not None:
+ query_parameters['isDefault'] = self._serialize.query("is_default", is_default, 'bool')
+ if names is not None:
+ query_parameters['names'] = self._serialize.query("names", names, '[str]')
+ if search_text is not None:
+ query_parameters['searchText'] = self._serialize.query("search_text", search_text, 'str')
+ if order_by is not None:
+ query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if order_by_asc is not None:
+ query_parameters['orderByAsc'] = self._serialize.query("order_by_asc", order_by_asc, 'bool')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('DatastorePropertiesResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores'} # type: ignore
+
+ def delete(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete datastore.
+
+ Delete datastore.
+
+ :param name: Datastore name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+ def get(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.DatastorePropertiesResource"
+ """Get datastore.
+
+ Get datastore.
+
+ :param name: Datastore name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DatastorePropertiesResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+ def create_or_update(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ body, # type: "models.DatastorePropertiesResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.DatastorePropertiesResource"
+ """Create or update datastore.
+
+ Create or update datastore.
+
+ :param name: Datastore name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Datastore entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DatastorePropertiesResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'DatastorePropertiesResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+ def list_secrets(
+ self,
+ name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.DatastoreCredentials"
+ """Get datastore secrets.
+
+ Get datastore secrets.
+
+ :param name: Datastore name.
+ :type name: str
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DatastoreCredentials, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DatastoreCredentials"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL by filling path arguments into the operation's metadata template
+ url = self.list_secrets.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct query string parameters (api-version is always sent)
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct HTTP header parameters
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('DatastoreCredentials', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py
new file mode 100644
index 00000000000..dead0ea7ec5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentContainersOperations(object):
+    """EnvironmentContainersOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.EnvironmentContainerResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentContainerResource"
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        # API version is pinned at code-generation time for this operation group.
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # 'name' is validated client-side against the service naming pattern before the request is sent.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the same resource payload shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentContainerResource"
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skiptoken=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.EnvironmentContainerResourceArmPaginatedResult"]
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.EnvironmentContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # The continuation URL already embeds all query parameters, so none are re-added.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py
new file mode 100644
index 00000000000..448874379ea
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentSpecificationVersionsOperations(object):
+    """EnvironmentSpecificationVersionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.EnvironmentSpecificationVersionResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentSpecificationVersionResource"
+        """Creates or updates an EnvironmentSpecificationVersion.
+
+        Creates or updates an EnvironmentSpecificationVersion.
+
+        :param name: Name of EnvironmentSpecificationVersion.
+        :type name: str
+        :param version: Version of EnvironmentSpecificationVersion.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Definition of EnvironmentSpecificationVersion.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        # API version is pinned at code-generation time for this operation group.
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # 'name' is validated client-side against the service naming pattern before the request is sent.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentSpecificationVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 (updated) and 201 (created) carry the same resource payload shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentSpecificationVersionResource"
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as successful deletion responses.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
+
+    def list(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skiptoken=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentSpecificationVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # The continuation URL already embeds all query parameters, so none are re-added.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py
new file mode 100644
index 00000000000..e5aa3b988e1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py
@@ -0,0 +1,478 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class JobsOperations(object):
+    """JobsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated models via the
+    # operation group (e.g. client.jobs.models.JobBaseResource).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build and send HTTP requests.
+        self._client = client
+        # msrest serializer/deserializer pair for request/response bodies.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Service client configuration (subscription id, polling interval, ...).
+        self._config = config
+
+    def create_or_update(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.JobBaseResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.JobBaseResource"
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Job definition object.
+        :type body: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResource"]
+        # Map well-known failure status codes to specific azure-core exception
+        # types; callers may extend/override the mapping via kwargs.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Job id is validated client-side against the service naming rules.
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'JobBaseResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) carry the same resource payload shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def get(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.JobBaseResource"
+        """Gets a Job by name/id.
+
+        Gets a Job by name/id.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResource"]
+        # Map well-known failure status codes to azure-core exception types.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # NOTE: unlike create_or_update, no id pattern is enforced here,
+            # matching the generated swagger definition for GET.
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def _delete_initial(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Send the initial DELETE request for the ``begin_delete`` long-running operation.
+
+        Returns nothing; the LRO machinery in ``begin_delete`` consumes the raw
+        pipeline response (via the ``cls`` pass-through) to drive polling.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 202 Accepted: the service continues the delete asynchronously; the
+        # polling URL and suggested delay are carried in these headers.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def begin_delete(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Deletes a Job.
+
+        Deletes a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x makes the initial call return the raw
+            # PipelineResponse, which the poller needs to start polling.
+            raw_result = self._delete_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; strip them so they
+        # are not passed again to the polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # DELETE has no body to deserialize; only honor a custom cls.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state is read from the Location header (see _delete_initial).
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skiptoken=None,  # type: Optional[str]
+        job_type=None,  # type: Optional[str]
+        tags=None,  # type: Optional[str]
+        tag=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.JobBaseResourceArmPaginatedResult"]
+        """Lists Jobs in the workspace.
+
+        Lists Jobs in the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param job_type: Type of job to be returned.
+        :type job_type: str
+        :param tags: Tags for job to be returned.
+        :type tags: str
+        :param tag: Jobs returned will have this tag key.
+        :type tag: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.JobBaseResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the full URL and query string.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if job_type is not None:
+                    query_parameters['jobType'] = self._serialize.query("job_type", job_type, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if tag is not None:
+                    query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link already embeds the query string.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('JobBaseResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): generated pagers deserialize the error body
+                # before map_error, unlike the non-paged methods; the
+                # deserialized model is unused when map_error raises first.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs'}  # type: ignore
+
+    def cancel(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Cancels a Job.
+
+        Cancels a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.cancel.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Cancellation is a POST action on the job resource; success (200)
+        # carries no response body.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py
new file mode 100644
index 00000000000..c7486743e75
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py
@@ -0,0 +1,753 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class LabelingJobsOperations(object):
+    """LabelingJobsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated models via the
+    # operation group (e.g. client.labeling_jobs.models.LabelingJobResource).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build and send HTTP requests.
+        self._client = client
+        # msrest serializer/deserializer pair for request/response bodies.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Service client configuration (subscription id, polling interval, ...).
+        self._config = config
+
+    def _create_or_update_initial(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.LabelingJobResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LabelingJobResource"
+        """Send the initial PUT request for the ``begin_create_or_update`` long-running operation."""
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # LabelingJob id is validated client-side against the service naming rules.
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'LabelingJobResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        # 201 Created: the operation continues asynchronously; the polling URL
+        # is carried in the Azure-AsyncOperation header.
+        if response.status_code == 201:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def begin_create_or_update(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.LabelingJobResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.LabelingJobResource"]
+        """Creates or updates a labeling job.
+
+        Creates or updates a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: LabelingJob definition object.
+        :type body: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either LabelingJobResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.LabelingJobResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x makes the initial call return the raw
+            # PipelineResponse, which the poller needs to start polling.
+            raw_result = self._create_or_update_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; strip them so they
+        # are not passed again to the polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Capture the async-operation header and deserialize the final
+            # resource once polling completes.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state is read via the Azure-AsyncOperation header (see
+        # _create_or_update_initial).
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def get(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        include_job_instructions=None,  # type: Optional[bool]
+        include_label_categories=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LabelingJobResource"
+        """Gets a labeling job by name/id.
+
+        Gets a labeling job by name/id.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param include_job_instructions: Boolean value to indicate whether to include JobInstructions
+         in response.
+        :type include_job_instructions: bool
+        :param include_label_categories: Boolean value to indicate whether to include LabelCategories
+         in response.
+        :type include_label_categories: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LabelingJobResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        # Optional flags to expand the returned payload with instructions /
+        # label categories; omitted from the query string when None.
+        if include_job_instructions is not None:
+            query_parameters['includeJobInstructions'] = self._serialize.query("include_job_instructions", include_job_instructions, 'bool')
+        if include_label_categories is not None:
+            query_parameters['includeLabelCategories'] = self._serialize.query("include_label_categories", include_label_categories, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def delete(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a labeling job.
+
+        Delete a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        # Default ARM status-code -> exception mapping; callers may extend/override via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are treated as success (NOTE(review): 204 presumably means
+        # the job was already gone — confirm against the service specification).
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skiptoken=None,  # type: Optional[str]
+        count=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.LabelingJobResourceArmPaginatedResult"]
+        """Lists labeling jobs in the workspace.
+
+        Lists labeling jobs in the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param count: Number of labeling jobs to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.LabelingJobResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the request for one page: the first page uses the operation URL plus
+            # query parameters; subsequent pages use the server-provided next_link verbatim.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # skiptoken/count are only attached to the first request; next_link
+                # already encodes the continuation state.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation link or None, iterator over this page's items).
+            deserialized = self._deserialize('LabelingJobResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs'}  # type: ignore
+
+    def pause(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Pause a labeling job.
+
+        Pause a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.pause.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # POST with no request body; unlike resume, pause is synchronous here
+        # (no LRO wrapper) and only 200 is accepted as success.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    pause.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause'}  # type: ignore
+
+    def _resume_initial(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        # Initial request of the 'resume' long-running operation; callers should use
+        # begin_resume, which wraps this call in an LROPoller.
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._resume_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 202 signals the operation was accepted and continues asynchronously; capture
+        # Location/Retry-After so the poller created by begin_resume can track it.
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _resume_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'}  # type: ignore
+
+    def begin_resume(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Resume a labeling job.
+
+        Resume a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: fire the initial request. cls returns the raw
+            # PipelineResponse so the poller can read status/headers directly.
+            raw_result = self._resume_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Operation has no body on completion; only invoke the caller's hook.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state of the LRO is read from the Location header (see _resume_initial).
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_resume.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'}  # type: ignore
+
+    def _export_labels_initial(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ExportSummary"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Optional["models.ExportSummary"]
+        # Initial request of the 'exportLabels' long-running operation; callers should
+        # use begin_export_labels, which wraps this call in an LROPoller.
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.ExportSummary"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._export_labels_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the ExportSummary request payload into the POST body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ExportSummary')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200: export finished immediately, body holds the summary.
+        # 202: export continues asynchronously; capture polling headers instead.
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _export_labels_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
+
+    def begin_export_labels(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ExportSummary"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ExportSummary"]
+        """Export labels from a labeling job.
+
+        Export labels from a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The export summary.
+        :type body: ~azure_machine_learning_workspaces.models.ExportSummary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ExportSummary or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ExportSummary]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExportSummary"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: fire the initial request. cls returns the raw
+            # PipelineResponse so the poller can read status/headers directly.
+            raw_result = self._export_labels_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response into the final ExportSummary.
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state of the LRO is read from the Location header (see _export_labels_initial).
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_export_labels.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_linked_services_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_linked_services_operations.py
new file mode 100644
index 00000000000..65073412feb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_linked_services_operations.py
@@ -0,0 +1,302 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class LinkedServicesOperations(object):
+    """LinkedServicesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wired up by the service client; holds the shared pipeline client,
+        # client configuration, and msrest (de)serializers.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LinkedServiceList"
+        """List all linked services under an AML workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceList, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceList
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # NOTE(review): this operation returns the full list in one response (no
+        # ItemPaged wrapper), presumably because the API is not paginated — confirm.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceList', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices'}  # type: ignore
+
+    def create(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        link_name,  # type: str
+        parameters,  # type: "models.LinkedServiceRequest"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LinkedServiceResponse"
+        """Add a new linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :param parameters: The object for creating or updating a linked service.
+        :type parameters: ~azure_machine_learning_workspaces.models.LinkedServiceRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceResponse, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # PUT with the serialized LinkedServiceRequest payload as the request body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'LinkedServiceRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
+
+    def get(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        link_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LinkedServiceResponse"
+        """Get the detail of a linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LinkedServiceResponse, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LinkedServiceResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LinkedServiceResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LinkedServiceResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
+
+    def delete(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        link_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a linked service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param link_name: Friendly name of the linked workspace.
+        :type link_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'linkName': self._serialize.url("link_name", link_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are treated as success for delete.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/linkedServices/{linkName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..93420d80340
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,931 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations(object):
+ """MachineLearningComputeOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Store the pipeline client, configuration and (de)serializers handed
+        # in by the generated service client; users should not construct this
+        # operations class directly (see class docstring).
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_workspace(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        skiptoken=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.PaginatedComputeResourcesList"]
+        """Gets computes in specified workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+        # Callers may extend/override the default status-code -> exception
+        # mapping via the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for either the first page (full URL +
+            # query parameters) or a continuation page (next_link as-is).
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_workspace.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                # $skiptoken is only sent on the first request; later pages
+                # carry their continuation state inside next_link.
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is a complete URL (including query string).
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, iterator).
+            deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # cls hook may transform the page's element list.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Deserialize the typed error body before mapping so it is
+                # available on the raised HttpResponseError.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
+
+    def get(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+        not returned - use 'keys' nested resource to get them.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        # Default status-code -> exception mapping; extensible via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            # Custom response hook: (pipeline_response, deserialized, headers).
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        parameters, # type: "models.ComputeResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Send the initial PUT request of the create_or_update long-running
+        operation; polling is driven by :meth:`begin_create_or_update`.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ComputeResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 200 = synchronous completion; 201 = accepted, async status exposed
+        # through the Azure-AsyncOperation header.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if response.status_code == 201:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def begin_create_or_update(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        parameters, # type: "models.ComputeResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ComputeResource"]
+        """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+        that it does not exist yet.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Payload with Machine Learning compute definition.
+        :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PUT. The cls lambda returns the
+            # raw pipeline response so the poller can read operation headers.
+            raw_result = self._create_or_update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; drop them so they
+        # are not replayed on polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final ComputeResource once polling completes.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        # Passed to ARMPolling so it can format templated polling URLs.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def _update_initial(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        parameters, # type: "models.ClusterUpdateParameters"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Send the initial PATCH request of the update long-running
+        operation; polling is driven by :meth:`begin_update`.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def begin_update(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        parameters, # type: "models.ClusterUpdateParameters"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ComputeResource"]
+        """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Additional parameters for cluster update.
+        :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PATCH. The cls lambda keeps the
+            # raw pipeline response for the poller.
+            raw_result = self._update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Consumed by the initial call; must not be replayed while polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final ComputeResource once polling completes.
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        # Passed to ARMPolling so it can format templated polling URLs.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def _delete_initial(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Send the initial DELETE request of the delete long-running
+        operation; polling is driven by :meth:`begin_delete`.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        # Whether to delete or merely detach the underlying compute resource.
+        query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 202 = accepted for async deletion; polling URLs arrive in headers.
+        if response.status_code == 202:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def begin_delete(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Deletes specified Machine Learning compute.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+         underlying compute from workspace if 'Detach'.
+        :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial DELETE. The cls lambda keeps
+            # the raw pipeline response for the poller.
+            raw_result = self._delete_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                underlying_resource_action=underlying_resource_action,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Consumed by the initial call; must not be replayed while polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete has no body; only the optional cls hook produces a value.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Passed to ARMPolling so it can format templated polling URLs.
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+    def list_nodes(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.AmlComputeNodesInformation"]
+        """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page is a POST to the listNodes action; continuation pages
+            # are plain GETs against the returned next_link.
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_nodes.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                    'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.post(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Page elements live in the 'nodes' property (not 'value').
+            deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+            list_of_elem = deserialized.nodes
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Deserialize the typed error body before mapping so it is
+                # available on the raised HttpResponseError.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
+
+    def list_keys(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        compute_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ComputeSecrets"
+        """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeSecrets, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Secrets retrieval is a POST action (listKeys), not a GET.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
+
+ def start(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.start.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def stop(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.stop.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def restart(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py
new file mode 100644
index 00000000000..66047706373
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py
@@ -0,0 +1,444 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningServiceOperations(object):
+    """MachineLearningServiceOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_workspace(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        skiptoken=None, # type: Optional[str]
+        model_id=None, # type: Optional[str]
+        model_name=None, # type: Optional[str]
+        tag=None, # type: Optional[str]
+        tags=None, # type: Optional[str]
+        properties=None, # type: Optional[str]
+        run_id=None, # type: Optional[str]
+        expand=None, # type: Optional[bool]
+        orderby=None, # type: Optional[Union[str, "models.OrderString"]]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.PaginatedServiceList"]
+        """Gets services in specified workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param model_id: The Model Id.
+        :type model_id: str
+        :param model_name: The Model name.
+        :type model_name: str
+        :param tag: The object tag.
+        :type tag: str
+        :param tags: A set of tags with which to filter the returned services. It is a comma separated
+         string of tags key or tags key=value Example: tagKey1,tagKey2,tagKey3=value3 .
+        :type tags: str
+        :param properties: A set of properties with which to filter the returned services. It is a
+         comma separated string of properties key and/or properties key=value Example:
+         propKey1,propKey2,propKey3=value3 .
+        :type properties: str
+        :param run_id: runId for model associated with service.
+        :type run_id: str
+        :param expand: Set to True to include Model details.
+        :type expand: bool
+        :param orderby: The option to order the response.
+        :type orderby: str or ~azure_machine_learning_workspaces.models.OrderString
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedServiceList or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedServiceList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedServiceList"]
+        # Map well-known status codes to typed exceptions; overridable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Builds the HTTP request for either the first page (templated URL plus the
+        # filter query parameters) or a subsequent page (the service-provided
+        # next_link is used verbatim, so no query parameters are re-applied).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_workspace.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if model_id is not None:
+                    query_parameters['modelId'] = self._serialize.query("model_id", model_id, 'str')
+                if model_name is not None:
+                    query_parameters['modelName'] = self._serialize.query("model_name", model_name, 'str')
+                if tag is not None:
+                    query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if properties is not None:
+                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+                if run_id is not None:
+                    query_parameters['runId'] = self._serialize.query("run_id", run_id, 'str')
+                if expand is not None:
+                    query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+                if orderby is not None:
+                    query_parameters['orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Turns one page's response into (next_link, iterator of page elements).
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('PaginatedServiceList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        # Fetches a page; non-200 responses surface the service error model
+        # through HttpResponseError.
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services'} # type: ignore
+
+    def get(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        service_name, # type: str
+        expand=False, # type: Optional[bool]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ServiceResource"
+        """Get a Service by name.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param service_name: Name of the Azure Machine Learning service.
+        :type service_name: str
+        :param expand: Set to True to include Model details.
+        :type expand: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ServiceResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ServiceResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'serviceName': self._serialize.url("service_name", service_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        if expand is not None:
+            query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+    def delete(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        service_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a specific Service.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param service_name: Name of the Azure Machine Learning service.
+        :type service_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'serviceName': self._serialize.url("service_name", service_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 means the service did not exist; both outcomes are success here.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        service_name, # type: str
+        properties, # type: "models.CreateServiceRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Optional["models.ServiceResource"]
+        # Initial PUT for the long-running create/update; polled by
+        # begin_create_or_update rather than called directly.
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ServiceResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'serviceName': self._serialize.url("service_name", service_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(properties, 'CreateServiceRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200: the operation completed synchronously and the body is the resource.
+        # 201: the operation continues asynchronously; progress is tracked through
+        # the Azure-AsyncOperation header and no body is deserialized here.
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+        if response.status_code == 201:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+    def begin_create_or_update(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        service_name, # type: str
+        properties, # type: "models.CreateServiceRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ServiceResource"]
+        """Creates or updates service. This call will update a service if it exists. This is a
+        nonrecoverable operation. If your intent is to create a new service, do a GET first to verify
+        that it does not exist yet.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param service_name: Name of the Azure Machine Learning service.
+        :type service_name: str
+        :param properties: The payload that is used to create or update the Service.
+        :type properties: ~azure_machine_learning_workspaces.models.CreateServiceRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ServiceResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ServiceResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        # Only send the initial PUT when starting fresh; a continuation token
+        # means we are resuming polling of an already-submitted operation.
+        if cont_token is None:
+            raw_result = self._create_or_update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                service_name=service_name,
+                properties=properties,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        # Deserializes the final polled response into the resource model.
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'serviceName': self._serialize.url("service_name", service_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py
new file mode 100644
index 00000000000..c570149c501
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py
@@ -0,0 +1,341 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelContainersOperations(object):
+    """ModelContainersOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # NOTE: AutoRest-generated operation group; edits here are lost on regeneration.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skiptoken=None,  # type: Optional[str]
+        count=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.ModelContainerResourceArmPaginatedResult"]
+        """List model containers.
+
+        List model containers.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param count: Maximum number of results to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ModelContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelContainerResourceArmPaginatedResult"]
+        # Map auth/not-found/conflict statuses to typed azure.core exceptions; callers may extend via kwargs.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the first-page request from the URL template, or a follow-up
+            # request from the service-supplied next_link (which already embeds
+            # its own query string, so no parameters are re-added).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and hand ItemPaged the (continuation, items) pair.
+            deserialized = self._deserialize('ModelContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): here the error body is deserialized *before* map_error,
+                # unlike the non-paged methods below which call map_error first —
+                # generated ordering; presumably intentional, confirm on regeneration.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ModelContainerResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelContainerResource"
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Service-side constraint: name must start alphanumeric, then up to 254 chars of [a-zA-Z0-9-_].
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ModelContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) carry the same resource payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelContainerResource"
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 means the container was already absent; both 200 and 204 are success.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py
new file mode 100644
index 00000000000..b31f46a4255
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py
@@ -0,0 +1,389 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelVersionsOperations(object):
+    """ModelVersionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # NOTE: AutoRest-generated operation group; edits here are lost on regeneration.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skiptoken=None,  # type: Optional[str]
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        version=None,  # type: Optional[str]
+        description=None,  # type: Optional[str]
+        offset=None,  # type: Optional[int]
+        tags=None,  # type: Optional[str]
+        properties=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.ModelVersionResourceArmPaginatedResult"]
+        """List model versions.
+
+        List model versions.
+
+        :param name: Model name.
+        :type name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param version: Model version.
+        :type version: str
+        :param description: Model description.
+        :type description: str
+        :param offset: Number of initial results to skip.
+        :type offset: int
+        :param tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2.
+        :type tags: str
+        :param properties: Comma-separated list of property names (and optionally values). Example:
+         prop1,prop2=value2.
+        :type properties: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ModelVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResourceArmPaginatedResult"]
+        # Map auth/not-found/conflict statuses to typed azure.core exceptions; callers may extend via kwargs.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the first-page request (URL template + optional filters), or a
+            # follow-up request from the service-supplied next_link, which already
+            # embeds its query string so no parameters are re-added.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # OData-style options ($skiptoken/$orderBy/$top) plus plain filter params;
+                # each is sent only when the caller supplied it.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if version is not None:
+                    query_parameters['version'] = self._serialize.query("version", version, 'str')
+                if description is not None:
+                    query_parameters['description'] = self._serialize.query("description", description, 'str')
+                if offset is not None:
+                    query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if properties is not None:
+                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and hand ItemPaged the (continuation, items) pair.
+            deserialized = self._deserialize('ModelVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): error body is deserialized *before* map_error here,
+                # unlike the non-paged methods below — generated ordering; confirm
+                # on regeneration.
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ModelVersionResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelVersionResource"
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Service-side constraint: name must start alphanumeric, then up to 254 chars of [a-zA-Z0-9-_].
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ModelVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) carry the same resource payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelVersionResource"
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 means the version was already absent; both 200 and 204 are success.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..8de5e2cc205
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
@@ -0,0 +1,226 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations(object):
+    """NotebooksOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client  # pipeline client used to build and send HTTP requests
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config  # carries subscription_id and polling_interval
+
+    def _prepare_initial(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # Initial request of the begin_prepare LRO; returns the resource on 200
+        # or None on 202 (operation accepted, still running).
+        # type: (...) -> Optional["models.NotebookResourceInfo"]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.NotebookResourceInfo"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied mappings take precedence
+        api_version = "2020-09-01-preview"  # API version this client was generated against
+        accept = "application/json"
+
+        # Construct URL
+        url = self._prepare_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)  # prepareNotebook is an ARM POST action
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = None  # a 202 response carries no body
+        if response.status_code == 200:
+            deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'}  # type: ignore
+
+    def begin_prepare(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.NotebookResourceInfo"]
+        """prepare.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.NotebookResourceInfo"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._prepare_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)  # consumed by the initial call; must not reach the poller
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserializes the terminal response once polling completes.
+            deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from the Location header
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'}  # type: ignore
+
+    def list_keys(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ListNotebookKeysResult"
+        """list_keys.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ListNotebookKeysResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListNotebookKeysResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)  # listNotebookKeys is an ARM POST action
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py
new file mode 100644
index 00000000000..218f1e2f36a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py
@@ -0,0 +1,727 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineDeploymentsOperations(object):
+ """OnlineDeploymentsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wires the shared pipeline client, serializers, and client configuration
+        # into this operation group; called by the service client, not by users.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config  # carries subscription_id and polling_interval
+
+    def list(
+        self,
+        endpoint_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skiptoken=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+        """List Inference Endpoint Deployments.
+
+        List Inference Endpoint Deployments.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Top of list.
+        :type top: int
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds either the first-page request or a follow-up request to next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # next_link already embeds all query parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token, iterator of page items) as ItemPaged expects.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # error body is parsed first so it is available for HttpResponseError below
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments'}  # type: ignore
+
+    def _delete_initial(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # Initial DELETE request of the begin_delete LRO; 202 responses expose the
+        # Location/Retry-After headers the poller follows.
+        # type: (...) -> None
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:  # 204 = already gone
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def begin_delete(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Delete Inference Endpoint Deployment.
+
+        Delete Inference Endpoint Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._delete_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)  # consumed by the initial call; must not reach the poller
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete has no body; only the optional cls hook sees the final response.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from the Location header
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def get(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.OnlineDeploymentTrackedResource"
+        """Get Inference Deployment Deployment.
+
+        Get Inference Deployment Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: OnlineDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.OnlineDeploymentTrackedResource"
+        **kwargs  # type: Any
+    ):
+        # Initial PUT request of the begin_create_or_update LRO; returns the
+        # resource from a 200 (update) or 201 (create) response.
+        # type: (...) -> "models.OnlineDeploymentTrackedResource"
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # endpoint/deployment names are validated client-side against the service naming pattern
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'OnlineDeploymentTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 201:
+            # 201 also surfaces the Azure-AsyncOperation header the poller follows
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def begin_create_or_update(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.OnlineDeploymentTrackedResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineDeploymentTrackedResource"]
+        """Create or update Inference Endpoint Deployment.
+
+        Create or update Inference Endpoint Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Inference Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._create_or_update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)  # consumed by the initial call; must not reach the poller
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserializes the terminal response and surfaces the async-operation header.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from Azure-AsyncOperation
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def _update_initial(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineDeploymentPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Optional["models.OnlineDeploymentTrackedResource"]
+        # Initial PATCH call of the update long-running operation.  Returns the
+        # deserialized resource when the service answers 200, or None when it
+        # answers 202 (request accepted); on 202 the Location / Retry-After
+        # headers are captured so the poller (or a custom ``cls``) can use them.
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OnlineDeploymentTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        # Endpoint and deployment names are validated client-side against the
+        # ARM resource-name pattern before any request is sent.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineDeploymentPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        # 202: operation accepted asynchronously — no body; expose the polling
+        # headers instead of a deserialized resource.
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def begin_update(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineDeploymentPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineDeploymentTrackedResource"]
+        """Update Online Deployment.
+
+        Update Online Deployment.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineDeploymentPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PATCH.  ``cls`` is overridden
+            # so the raw pipeline response is handed to the poller unchanged.
+            raw_result = self._update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Already consumed by the initial call; must not leak into the poller.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response of the long-running operation.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # This operation reports completion through the Location header.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def get_logs(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.DeploymentLogsRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.DeploymentLogs"
+        """Retrieve logs for an online endpoint deployment.
+
+        Posts a DeploymentLogsRequest to the deployment's ``getLogs`` action and
+        returns the resulting DeploymentLogs.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The name and identifier for the endpoint.
+        :type deployment_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The request containing parameters for retrieving logs.
+        :type body: ~azure_machine_learning_workspaces.models.DeploymentLogsRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeploymentLogs, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DeploymentLogs
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DeploymentLogs"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_logs.metadata['url'] # type: ignore
+        # NOTE(review): unlike create/update, this action applies no name
+        # pattern validation — the service validates the names instead.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DeploymentLogsRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeploymentLogs', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py
new file mode 100644
index 00000000000..c6d0e517a89
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py
@@ -0,0 +1,910 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineEndpointsOperations(object):
+ """OnlineEndpointsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Store the pipeline client, configuration, and the (de)serializers
+        # supplied by the generated service client; all operation methods in
+        # this class read these four attributes.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        name=None, # type: Optional[str]
+        count=None, # type: Optional[int]
+        compute_type=None, # type: Optional[Union[str, "models.EndpointComputeType"]]
+        skiptoken=None, # type: Optional[str]
+        tags=None, # type: Optional[str]
+        properties=None, # type: Optional[str]
+        order_by=None, # type: Optional[Union[str, "models.OrderString"]]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
+        """List Online Endpoints.
+
+        List Online Endpoints.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param name: Name of the endpoint.
+        :type name: str
+        :param count: Number of endpoints to be retrieved in a page of results.
+        :type count: int
+        :param compute_type: EndpointComputeType to be filtered by.
+        :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+        :param skiptoken: Continuation token for pagination.
+        :type skiptoken: str
+        :param tags: A set of tags with which to filter the returned models. It is a comma separated
+         string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .
+        :type tags: str
+        :param properties: A set of properties with which to filter the returned models. It is a comma
+         separated string of properties key and/or properties key=value Example:
+         propKey1,propKey2,propKey3=value3 .
+        :type properties: str
+        :param order_by: The option to order the response.
+        :type order_by: str or ~azure_machine_learning_workspaces.models.OrderString
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for one page.  For continuation pages the
+            # service-provided next_link already encodes the query string, so
+            # only the Accept header is re-applied.
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # Optional filters are only added when the caller supplied them.
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if name is not None:
+                    query_parameters['name'] = self._serialize.query("name", name, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+                if compute_type is not None:
+                    query_parameters['computeType'] = self._serialize.query("compute_type", compute_type, 'str')
+                if skiptoken is not None:
+                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if properties is not None:
+                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+                if order_by is not None:
+                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Turn one page response into (next-page link, iterator of items);
+            # a caller-supplied ``cls`` may transform the page's element list.
+            deserialized = self._deserialize('OnlineEndpointTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch one page, raising a typed error on any non-200 response.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints'} # type: ignore
+
+    def _delete_initial(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        # Initial DELETE call of the delete long-running operation.  Accepts
+        # 200/202/204; on 202 the Location / Retry-After headers are surfaced
+        # (via ``cls``) for the poller to track completion.
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def begin_delete(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Delete Online Endpoint.
+
+        Delete Online Endpoint.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial DELETE.  ``cls`` is overridden
+            # so the raw pipeline response is handed to the poller unchanged.
+            raw_result = self._delete_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Already consumed by the initial call; must not leak into the poller.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete yields no body: return None unless a custom ``cls`` is set.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # This operation reports completion through the Location header.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def get(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.OnlineEndpointTrackedResource"
+        """Get Online Endpoint.
+
+        Get Online Endpoint.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: OnlineEndpointTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Synchronous GET: only a 200 with a body is a success.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.OnlineEndpointTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.OnlineEndpointTrackedResource"
+        # Initial PUT call of the create-or-update long-running operation.
+        # Both 200 and 201 carry a resource body; on 201 the
+        # Azure-AsyncOperation header is also surfaced for the poller.
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        # The endpoint name is validated client-side against the ARM
+        # resource-name pattern before the request is sent.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'OnlineEndpointTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if response.status_code == 201:
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def begin_create_or_update(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.OnlineEndpointTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineEndpointTrackedResource"]
+        """Create or update Online Endpoint.
+
+        Create or update Online Endpoint.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        # NOTE: AutoRest-generated LRO wrapper; manual edits are lost on regeneration.
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PUT. cls=lambda x,y,z: x makes the
+            # helper hand back the raw PipelineResponse so the poller can drive it.
+            raw_result = self._create_or_update_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; don't forward to polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response of the LRO into the tracked
+            # resource, surfacing the Azure-AsyncOperation header to cls callers.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        # URL arguments the ARM poller needs to re-format polling links.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def _update_initial(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineEndpointPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Optional["models.OnlineEndpointTrackedResource"]
+        # Raw PATCH used by begin_update: returns the deserialized resource on
+        # 200, or None on 202 (accepted; polling info is in the response headers).
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OnlineEndpointTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineEndpointPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if response.status_code == 202:
+            # Async accept: expose Location / Retry-After for the LRO poller.
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def begin_update(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineEndpointPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineEndpointTrackedResource"]
+        """Update Online Endpoint.
+
+        Update Online Endpoint.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineEndpointPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        # NOTE: AutoRest-generated LRO wrapper; manual edits are lost on regeneration.
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PATCH; keep the raw PipelineResponse
+            # (cls=lambda x,y,z: x) so the poller can take over.
+            raw_result = self._update_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; don't forward to polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response of the LRO into the tracked resource.
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        # URL arguments the ARM poller needs to re-format polling links.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
+    def _regenerate_keys_initial(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.RegenerateEndpointKeysRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        # Raw POST used by begin_regenerate_keys: no body is returned; a 202
+        # carries Location / Retry-After headers for the LRO poller.
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._regenerate_keys_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'RegenerateEndpointKeysRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            # Async accept: expose Location / Retry-After for the LRO poller.
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _regenerate_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} # type: ignore
+
+    def begin_regenerate_keys(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.RegenerateEndpointKeysRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: RegenerateKeys request .
+        :type body: ~azure_machine_learning_workspaces.models.RegenerateEndpointKeysRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        # NOTE: AutoRest-generated LRO wrapper; manual edits are lost on regeneration.
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial POST; keep the raw PipelineResponse
+            # (cls=lambda x,y,z: x) so the poller can take over.
+            raw_result = self._regenerate_keys_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; don't forward to polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Operation has no body; only invoke the custom cls hook if present.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # URL arguments the ARM poller needs to re-format polling links.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} # type: ignore
+
+    def list_keys(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.EndpointAuthKeys"
+        """List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthKeys, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthKeys"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # NOTE(review): body-less POST -- presumably the ARM "listKeys action"
+        # convention for secret-returning operations; confirm against the REST spec.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys'} # type: ignore
+
+    def get_token(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.EndpointAuthToken"
+        """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthToken, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthToken
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthToken"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_token.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Body-less POST: the token endpoint takes no request payload.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthToken', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
new file mode 100644
index 00000000000..ded79842a5b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+    """Operations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the generated model types.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wired up by the generated service client; stores the shared pipeline
+        # client plus the (de)serializers used by every operation below.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.OperationListResult"]
+        """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OperationListResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for the first page (metadata URL) or a
+            # follow-up page (opaque next_link from the service).
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('OperationListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            # First tuple element is the continuation link; None here means the
+            # result is effectively single-page.
+            return None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.MachineLearningServiceError, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..edb316a6d7a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,245 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations(object):
+    """PrivateEndpointConnectionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias to the generated models module so model classes can be reached
+    # through the operation group (standard AutoRest pattern).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # PipelineClient used to build and send HTTP requests.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def get(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        private_endpoint_connection_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.PrivateEndpointConnection"
+        """Gets the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
+        # Default mapping of HTTP status codes to azure-core exception types;
+        # caller-supplied 'error_map' entries override these defaults.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        # API version pinned by the code generator to the service contract.
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    # Metadata consumed above to build the request URL for this operation.
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+
+    def put(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        private_endpoint_connection_name,  # type: str
+        properties,  # type: "models.PrivateEndpointConnection"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.PrivateEndpointConnection"
+        """Update the state of specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :param properties: The private endpoint connection properties.
+        :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.put.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the model into the request body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+
+    def delete(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        private_endpoint_connection_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 indicates the connection was already absent; both are success.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..02437fc2f1e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations(object):
+    """PrivateLinkResourcesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias to the generated models module (standard AutoRest pattern).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # PipelineClient used to build and send HTTP requests.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_workspace(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.PrivateLinkResourceListResult"
+        """Gets the private link resources that need to be created for a workspace.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateLinkResourceListResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateLinkResourceListResult"]
+        # Default mapping of HTTP status codes to azure-core exception types;
+        # caller-supplied 'error_map' entries override these defaults.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_by_workspace.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # NOTE: this operation defines no error model in the swagger, so the
+        # raw response is raised without a deserialized 'model' attached.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
new file mode 100644
index 00000000000..2bbc03397e5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
@@ -0,0 +1,181 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations(object):
+    """QuotasOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias to the generated models module (standard AutoRest pattern).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # PipelineClient used to build and send HTTP requests.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def update(
+        self,
+        location,  # type: str
+        parameters,  # type: "models.QuotaUpdateParameters"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.UpdateWorkspaceQuotasResult"
+        """Update quota for each VM family in workspace.
+
+        :param location: The location for update quota is queried.
+        :type location: str
+        :param parameters: Quota update parameters.
+        :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+        # Default mapping of HTTP status codes to azure-core exception types;
+        # caller-supplied 'error_map' entries override these defaults.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the model into the request body; this is a POST action
+        # against the regional 'updateQuotas' endpoint.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.MachineLearningServiceError, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'}  # type: ignore
+
+    def list(
+        self,
+        location,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.ListWorkspaceQuotas"]
+        """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceQuotas"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for either the first page (templated URL)
+            # or a continuation page (opaque 'next_link' URL from the service).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and yield (continuation token, items).
+            deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazily pages through results as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
new file mode 100644
index 00000000000..7350d9b3049
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+    """UsagesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias to the generated models module (standard AutoRest pattern).
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # PipelineClient used to build and send HTTP requests.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        location,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.ListUsagesResult"]
+        """Gets the current usage information as well as limits for AML resources for given subscription
+        and location.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListUsagesResult"]
+        # Default mapping of HTTP status codes to azure-core exception types;
+        # caller-supplied 'error_map' entries override these defaults.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2020-09-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for either the first page (templated URL)
+            # or a continuation page (opaque 'next_link' URL from the service).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and yield (continuation token, items).
+            deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazily pages through results as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..35714054bcb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class VirtualMachineSizesOperations(object):
    """VirtualMachineSizesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the shared pipeline client, configuration and
        # (de)serializers supplied by the service client.
        self._client = client
        self._config = config
        self._serialize = serializer
        self._deserialize = deserializer

    def list(
        self,
        location,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.VirtualMachineSizeListResult"
        """Returns supported VM Sizes in a location.

        :param location: The location upon which virtual-machine-sizes is queried.
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineSizeListResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.VirtualMachineSizeListResult"]
        # Map well-known HTTP failures onto typed azure.core exceptions; callers
        # may extend/override the mapping through the 'error_map' kwarg.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Expand the metadata URL template with validated path segments.
        formatted_url = self._client.format_url(
            self.list.metadata['url'],  # type: ignore
            location=self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
            subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        )

        query_parameters = {
            'api-version': self._serialize.query("api_version", api_version, 'str'),
        }  # type: Dict[str, Any]
        header_parameters = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }  # type: Dict[str, Any]

        request = self._client.get(formatted_url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
        # A custom 'cls' callback, when given, shapes the returned value.
        return cls(pipeline_response, deserialized, {}) if cls else deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..78354fa0f12
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class WorkspaceConnectionsOperations(object):
    """WorkspaceConnectionsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Shared pipeline client and (de)serializers injected by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        target=None,  # type: Optional[str]
        category=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"]
        """List all connections under a AML workspace.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param target: Target of the workspace connection.
        :type target: str
        :param category: Category of the workspace connection.
        :type category: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
        # Map well-known HTTP failures onto typed azure.core exceptions; callers
        # may extend/override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: expand the metadata URL template and attach the
                # optional 'target'/'category' filters as query parameters.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if target is not None:
                    query_parameters['target'] = self._serialize.query("target", target, 'str')
                if category is not None:
                    query_parameters['category'] = self._serialize.query("category", category, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service returns a fully-formed URL.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Continuation token is always None here, so ItemPaged stops after a
            # single page (unlike the other paged operations in this package,
            # which return deserialized.next_link or None).
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): here the error body is deserialized before
                # map_error, while create/get/delete below call map_error first;
                # if map_error raises, this 'error' model is discarded.
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'}  # type: ignore

    def create(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        connection_name,  # type: str
        parameters,  # type: "models.WorkspaceConnectionDto"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.WorkspaceConnection"
        """Add a new workspace connection.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param connection_name: Friendly name of the workspace connection.
        :type connection_name: str
        :param parameters: The object for creating or updating a new workspace connection.
        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: WorkspaceConnection, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        # Content type of the serialized request body; overridable via kwargs.
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the DTO into the PUT body and run the request.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore

    def get(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.WorkspaceConnection"
        """Get the detail of a workspace connection.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param connection_name: Friendly name of the workspace connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: WorkspaceConnection, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore

    def delete(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete a workspace connection.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param connection_name: Friendly name of the workspace connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 = already absent / no content; both count as success for delete.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..68726a845b1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class WorkspaceFeaturesOperations(object):
    """WorkspaceFeaturesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the shared pipeline client, configuration and
        # (de)serializers supplied by the service client.
        self._client = client
        self._config = config
        self._serialize = serializer
        self._deserialize = deserializer

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.ListAmlUserFeatureResult"]
        """Lists all enabled features for a workspace.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListAmlUserFeatureResult"]
        # Map well-known HTTP failures onto typed azure.core exceptions; callers
        # may extend/override the mapping through the 'error_map' kwarg.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def build_request(next_link=None):
            headers = {
                'Accept': self._serialize.header("accept", accept, 'str'),
            }  # type: Dict[str, Any]

            if next_link:
                # Subsequent pages: the service hands back a fully-formed URL.
                return self._client.get(next_link, {}, headers)

            # First page: expand the metadata URL template with validated
            # path segments and attach the api-version query parameter.
            page_url = self._client.format_url(
                self.list.metadata['url'],  # type: ignore
                subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
                workspaceName=self._serialize.url("workspace_name", workspace_name, 'str'),
            )
            params = {
                'api-version': self._serialize.query("api_version", api_version, 'str'),
            }  # type: Dict[str, Any]
            return self._client.get(page_url, params, headers)

        def unpack_page(pipeline_response):
            # Turn one HTTP response into (continuation-token, element-iterator).
            page = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
            elements = page.value
            if cls:
                elements = cls(elements)
            return page.next_link or None, iter(elements)

        def fetch_page(next_link=None):
            pipeline_response = self._client._pipeline.run(
                build_request(next_link), stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code != 200:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(fetch_page, unpack_page)
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..f625de8d290
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
@@ -0,0 +1,688 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class WorkspacesOperations(object):
    """WorkspacesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # All four collaborators are supplied by the generated service client;
        # this operation group never constructs them itself.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def get(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.Workspace"
        """Gets the properties of the specified machine learning workspace.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Workspace, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.Workspace
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
        # 401/404/409 are mapped to specific azure-core exception types; any other
        # non-200 status falls through to the generic HttpResponseError below.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Workspace', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        parameters,  # type: "models.Workspace"
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["models.Workspace"]
        """Issue the initial PUT request of the create_or_update long-running operation.

        Returns the deserialized Workspace for an immediate 200/201 response, or
        None for a 202 (accepted, still provisioning); polling to completion is
        handled by :meth:`begin_create_or_update`.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.Workspace"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'Workspace')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 202 carries no body, so deserialized stays None in that case.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Workspace', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Workspace', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        parameters,  # type: "models.Workspace"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["models.Workspace"]
        """Creates or updates a workspace with the specified parameters.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param parameters: The parameters for creating or updating a machine learning workspace.
        :type parameters: ~azure_machine_learning_workspaces.models.Workspace
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # A saved continuation token means the initial PUT already happened in a
        # previous session; in that case we skip it and resume polling below.
        if cont_token is None:
            # cls=lambda x,y,z: x makes the initial call hand back the raw
            # PipelineResponse, which the poller needs to drive polling.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were only meaningful for the initial request; drop them so
        # they are not forwarded to the polling pipeline runs.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into the final model.
            deserialized = self._deserialize('Workspace', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def _delete_initial(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request of the delete long-running operation.

        Accepts 200/202/204; polling to completion is handled by :meth:`begin_delete`.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def begin_delete(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes a machine learning workspace.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # A saved continuation token means the initial DELETE already happened;
        # skip it and resume polling from the token instead.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete produces no body; only invoke the custom cls hook if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def update(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        parameters,  # type: "models.WorkspaceUpdateParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.Workspace"
        """Updates a machine learning workspace with the specified parameters.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param parameters: The parameters for updating a machine learning workspace.
        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Workspace, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.Workspace
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # PATCH with a partial-update body, unlike the PUT used by create_or_update.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Workspace', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore

    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        skiptoken=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.WorkspaceListResult"]
        """Lists all the available machine learning workspaces under the specified resource group.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the first-page request from metadata, or follow the service's
            # opaque next_link verbatim (it already embeds all query parameters).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (next-page link, iterator of elements).
            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore

    def list_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ListWorkspaceKeysResult"
        """Lists all the keys associated with this workspace. This includes keys for the storage account,
        app insights and password for container registry.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListWorkspaceKeysResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST (ARM "list secrets" action) with an empty body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'}  # type: ignore

    def resync_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Resync all the keys associated with this workspace. This includes keys for the storage account,
        app insights and password for container registry.

        :param resource_group_name: Name of the resource group in which workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.resync_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST action; the service returns no body on success.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.MachineLearningServiceError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'}  # type: ignore

    def list_by_subscription(
        self,
        skiptoken=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.WorkspaceListResult"]
        """Lists all the available machine learning workspaces under the specified subscription.

        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-09-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the first-page request from metadata, or follow the service's
            # opaque next_link verbatim (it already embeds all query parameters).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_subscription.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (next-page link, iterator of elements).
            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.MachineLearningServiceError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md
new file mode 100644
index 00000000000..592e3e17673
--- /dev/null
+++ b/src/machinelearningservices/report.md
@@ -0,0 +1,2837 @@
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az machinelearningservices|[groups](#CommandGroups)
+
+## GROUPS
+### Command groups in `az machinelearningservices` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az machinelearningservices workspace|Workspaces|[commands](#CommandsInWorkspaces)|
+|az machinelearningservices workspace-feature|WorkspaceFeatures|[commands](#CommandsInWorkspaceFeatures)|
+|az machinelearningservices usage|Usages|[commands](#CommandsInUsages)|
+|az machinelearningservices virtual-machine-size|VirtualMachineSizes|[commands](#CommandsInVirtualMachineSizes)|
+|az machinelearningservices quota|Quotas|[commands](#CommandsInQuotas)|
+|az machinelearningservices machine-learning-compute|MachineLearningCompute|[commands](#CommandsInMachineLearningCompute)|
+|az machinelearningservices||[commands](#CommandsIn)|
+|az machinelearningservices private-endpoint-connection|PrivateEndpointConnections|[commands](#CommandsInPrivateEndpointConnections)|
+|az machinelearningservices private-link-resource|PrivateLinkResources|[commands](#CommandsInPrivateLinkResources)|
+|az machinelearningservices linked-service|LinkedServices|[commands](#CommandsInLinkedServices)|
+|az machinelearningservices machine-learning-service|MachineLearningService|[commands](#CommandsInMachineLearningService)|
+|az machinelearningservices notebook|Notebooks|[commands](#CommandsInNotebooks)|
+|az machinelearningservices workspace-connection|WorkspaceConnections|[commands](#CommandsInWorkspaceConnections)|
+|az machinelearningservices code-container|CodeContainers|[commands](#CommandsInCodeContainers)|
+|az machinelearningservices code-version|CodeVersions|[commands](#CommandsInCodeVersions)|
+|az machinelearningservices component-container|ComponentContainers|[commands](#CommandsInComponentContainers)|
+|az machinelearningservices component-version|ComponentVersions|[commands](#CommandsInComponentVersions)|
+|az machinelearningservices data-container|DataContainers|[commands](#CommandsInDataContainers)|
+|az machinelearningservices datastore|Datastores|[commands](#CommandsInDatastores)|
+|az machinelearningservices data-version|DataVersions|[commands](#CommandsInDataVersions)|
+|az machinelearningservices environment-container|EnvironmentContainers|[commands](#CommandsInEnvironmentContainers)|
+|az machinelearningservices environment-specification-version|EnvironmentSpecificationVersions|[commands](#CommandsInEnvironmentSpecificationVersions)|
+|az machinelearningservices job|Jobs|[commands](#CommandsInJobs)|
+|az machinelearningservices labeling-job|LabelingJobs|[commands](#CommandsInLabelingJobs)|
+|az machinelearningservices model-container|ModelContainers|[commands](#CommandsInModelContainers)|
+|az machinelearningservices model-version|ModelVersions|[commands](#CommandsInModelVersions)|
+|az machinelearningservices online-deployment|OnlineDeployments|[commands](#CommandsInOnlineDeployments)|
+|az machinelearningservices online-endpoint|OnlineEndpoints|[commands](#CommandsInOnlineEndpoints)|
+
+## COMMANDS
+### Commands in `az machinelearningservices` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices list-sku](#ListSkus)|ListSkus|[Parameters](#ParametersListSkus)|[Example](#ExamplesListSkus)|
+
+### Commands in `az machinelearningservices code-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices code-container list](#CodeContainersList)|List|[Parameters](#ParametersCodeContainersList)|[Example](#ExamplesCodeContainersList)|
+|[az machinelearningservices code-container show](#CodeContainersGet)|Get|[Parameters](#ParametersCodeContainersGet)|[Example](#ExamplesCodeContainersGet)|
+|[az machinelearningservices code-container create](#CodeContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersCodeContainersCreateOrUpdate#Create)|[Example](#ExamplesCodeContainersCreateOrUpdate#Create)|
+|[az machinelearningservices code-container update](#CodeContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersCodeContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices code-container delete](#CodeContainersDelete)|Delete|[Parameters](#ParametersCodeContainersDelete)|[Example](#ExamplesCodeContainersDelete)|
+
+### Commands in `az machinelearningservices code-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices code-version list](#CodeVersionsList)|List|[Parameters](#ParametersCodeVersionsList)|[Example](#ExamplesCodeVersionsList)|
+|[az machinelearningservices code-version show](#CodeVersionsGet)|Get|[Parameters](#ParametersCodeVersionsGet)|[Example](#ExamplesCodeVersionsGet)|
+|[az machinelearningservices code-version create](#CodeVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersCodeVersionsCreateOrUpdate#Create)|[Example](#ExamplesCodeVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices code-version update](#CodeVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersCodeVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices code-version delete](#CodeVersionsDelete)|Delete|[Parameters](#ParametersCodeVersionsDelete)|[Example](#ExamplesCodeVersionsDelete)|
+
+### Commands in `az machinelearningservices component-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices component-container list](#ComponentContainersList)|List|[Parameters](#ParametersComponentContainersList)|[Example](#ExamplesComponentContainersList)|
+|[az machinelearningservices component-container show](#ComponentContainersGet)|Get|[Parameters](#ParametersComponentContainersGet)|[Example](#ExamplesComponentContainersGet)|
+|[az machinelearningservices component-container create](#ComponentContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersComponentContainersCreateOrUpdate#Create)|[Example](#ExamplesComponentContainersCreateOrUpdate#Create)|
+|[az machinelearningservices component-container update](#ComponentContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersComponentContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices component-container delete](#ComponentContainersDelete)|Delete|[Parameters](#ParametersComponentContainersDelete)|[Example](#ExamplesComponentContainersDelete)|
+
+### Commands in `az machinelearningservices component-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices component-version list](#ComponentVersionsList)|List|[Parameters](#ParametersComponentVersionsList)|[Example](#ExamplesComponentVersionsList)|
+|[az machinelearningservices component-version show](#ComponentVersionsGet)|Get|[Parameters](#ParametersComponentVersionsGet)|[Example](#ExamplesComponentVersionsGet)|
+|[az machinelearningservices component-version create](#ComponentVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersComponentVersionsCreateOrUpdate#Create)|[Example](#ExamplesComponentVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices component-version update](#ComponentVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersComponentVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices component-version delete](#ComponentVersionsDelete)|Delete|[Parameters](#ParametersComponentVersionsDelete)|[Example](#ExamplesComponentVersionsDelete)|
+
+### Commands in `az machinelearningservices data-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices data-container list](#DataContainersList)|List|[Parameters](#ParametersDataContainersList)|[Example](#ExamplesDataContainersList)|
+|[az machinelearningservices data-container show](#DataContainersGet)|Get|[Parameters](#ParametersDataContainersGet)|[Example](#ExamplesDataContainersGet)|
+|[az machinelearningservices data-container create](#DataContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataContainersCreateOrUpdate#Create)|[Example](#ExamplesDataContainersCreateOrUpdate#Create)|
+|[az machinelearningservices data-container update](#DataContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices data-container delete](#DataContainersDelete)|Delete|[Parameters](#ParametersDataContainersDelete)|[Example](#ExamplesDataContainersDelete)|
+
+### Commands in `az machinelearningservices data-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices data-version list](#DataVersionsList)|List|[Parameters](#ParametersDataVersionsList)|[Example](#ExamplesDataVersionsList)|
+|[az machinelearningservices data-version show](#DataVersionsGet)|Get|[Parameters](#ParametersDataVersionsGet)|[Example](#ExamplesDataVersionsGet)|
+|[az machinelearningservices data-version create](#DataVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataVersionsCreateOrUpdate#Create)|[Example](#ExamplesDataVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices data-version update](#DataVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices data-version delete](#DataVersionsDelete)|Delete|[Parameters](#ParametersDataVersionsDelete)|[Example](#ExamplesDataVersionsDelete)|
+
+### Commands in `az machinelearningservices datastore` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices datastore list](#DatastoresList)|List|[Parameters](#ParametersDatastoresList)|[Example](#ExamplesDatastoresList)|
+|[az machinelearningservices datastore show](#DatastoresGet)|Get|[Parameters](#ParametersDatastoresGet)|[Example](#ExamplesDatastoresGet)|
+|[az machinelearningservices datastore create](#DatastoresCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatastoresCreateOrUpdate#Create)|[Example](#ExamplesDatastoresCreateOrUpdate#Create)|
+|[az machinelearningservices datastore update](#DatastoresCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatastoresCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices datastore delete](#DatastoresDelete)|Delete|[Parameters](#ParametersDatastoresDelete)|[Example](#ExamplesDatastoresDelete)|
+|[az machinelearningservices datastore list-secret](#DatastoresListSecrets)|ListSecrets|[Parameters](#ParametersDatastoresListSecrets)|[Example](#ExamplesDatastoresListSecrets)|
+
+### Commands in `az machinelearningservices environment-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices environment-container list](#EnvironmentContainersList)|List|[Parameters](#ParametersEnvironmentContainersList)|[Example](#ExamplesEnvironmentContainersList)|
+|[az machinelearningservices environment-container show](#EnvironmentContainersGet)|Get|[Parameters](#ParametersEnvironmentContainersGet)|[Example](#ExamplesEnvironmentContainersGet)|
+|[az machinelearningservices environment-container create](#EnvironmentContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersEnvironmentContainersCreateOrUpdate#Create)|[Example](#ExamplesEnvironmentContainersCreateOrUpdate#Create)|
+|[az machinelearningservices environment-container update](#EnvironmentContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersEnvironmentContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices environment-container delete](#EnvironmentContainersDelete)|Delete|[Parameters](#ParametersEnvironmentContainersDelete)|[Example](#ExamplesEnvironmentContainersDelete)|
+
+### Commands in `az machinelearningservices environment-specification-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices environment-specification-version list](#EnvironmentSpecificationVersionsList)|List|[Parameters](#ParametersEnvironmentSpecificationVersionsList)|[Example](#ExamplesEnvironmentSpecificationVersionsList)|
+|[az machinelearningservices environment-specification-version show](#EnvironmentSpecificationVersionsGet)|Get|[Parameters](#ParametersEnvironmentSpecificationVersionsGet)|[Example](#ExamplesEnvironmentSpecificationVersionsGet)|
+|[az machinelearningservices environment-specification-version create](#EnvironmentSpecificationVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersEnvironmentSpecificationVersionsCreateOrUpdate#Create)|[Example](#ExamplesEnvironmentSpecificationVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices environment-specification-version update](#EnvironmentSpecificationVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersEnvironmentSpecificationVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices environment-specification-version delete](#EnvironmentSpecificationVersionsDelete)|Delete|[Parameters](#ParametersEnvironmentSpecificationVersionsDelete)|[Example](#ExamplesEnvironmentSpecificationVersionsDelete)|
+
+### Commands in `az machinelearningservices job` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices job list](#JobsList)|List|[Parameters](#ParametersJobsList)|[Example](#ExamplesJobsList)|
+|[az machinelearningservices job show](#JobsGet)|Get|[Parameters](#ParametersJobsGet)|[Example](#ExamplesJobsGet)|
+|[az machinelearningservices job create](#JobsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersJobsCreateOrUpdate#Create)|[Example](#ExamplesJobsCreateOrUpdate#Create)|
+|[az machinelearningservices job update](#JobsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersJobsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices job delete](#JobsDelete)|Delete|[Parameters](#ParametersJobsDelete)|[Example](#ExamplesJobsDelete)|
+|[az machinelearningservices job cancel](#JobsCancel)|Cancel|[Parameters](#ParametersJobsCancel)|[Example](#ExamplesJobsCancel)|
+
+### Commands in `az machinelearningservices labeling-job` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices labeling-job list](#LabelingJobsList)|List|[Parameters](#ParametersLabelingJobsList)|[Example](#ExamplesLabelingJobsList)|
+|[az machinelearningservices labeling-job show](#LabelingJobsGet)|Get|[Parameters](#ParametersLabelingJobsGet)|[Example](#ExamplesLabelingJobsGet)|
+|[az machinelearningservices labeling-job create](#LabelingJobsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLabelingJobsCreateOrUpdate#Create)|[Example](#ExamplesLabelingJobsCreateOrUpdate#Create)|
+|[az machinelearningservices labeling-job update](#LabelingJobsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLabelingJobsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices labeling-job delete](#LabelingJobsDelete)|Delete|[Parameters](#ParametersLabelingJobsDelete)|[Example](#ExamplesLabelingJobsDelete)|
+|[az machinelearningservices labeling-job export-label](#LabelingJobsExportLabels)|ExportLabels|[Parameters](#ParametersLabelingJobsExportLabels)|[Example](#ExamplesLabelingJobsExportLabels)|
+|[az machinelearningservices labeling-job pause](#LabelingJobsPause)|Pause|[Parameters](#ParametersLabelingJobsPause)|[Example](#ExamplesLabelingJobsPause)|
+|[az machinelearningservices labeling-job resume](#LabelingJobsResume)|Resume|[Parameters](#ParametersLabelingJobsResume)|[Example](#ExamplesLabelingJobsResume)|
+
+### Commands in `az machinelearningservices linked-service` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices linked-service list](#LinkedServicesList)|List|[Parameters](#ParametersLinkedServicesList)|[Example](#ExamplesLinkedServicesList)|
+|[az machinelearningservices linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)|
+|[az machinelearningservices linked-service create](#LinkedServicesCreate)|Create|[Parameters](#ParametersLinkedServicesCreate)|[Example](#ExamplesLinkedServicesCreate)|
+|[az machinelearningservices linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)|
+
+### Commands in `az machinelearningservices machine-learning-compute` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices machine-learning-compute list](#MachineLearningComputeListByWorkspace)|ListByWorkspace|[Parameters](#ParametersMachineLearningComputeListByWorkspace)|[Example](#ExamplesMachineLearningComputeListByWorkspace)|
+|[az machinelearningservices machine-learning-compute show](#MachineLearningComputeGet)|Get|[Parameters](#ParametersMachineLearningComputeGet)|[Example](#ExamplesMachineLearningComputeGet)|
+|[az machinelearningservices machine-learning-compute aks create](#MachineLearningComputeCreateOrUpdate#Create#AKS)|CreateOrUpdate#Create#AKS|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AKS)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AKS)|
+|[az machinelearningservices machine-learning-compute aml-compute create](#MachineLearningComputeCreateOrUpdate#Create#AmlCompute)|CreateOrUpdate#Create#AmlCompute|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|
+|[az machinelearningservices machine-learning-compute compute-instance create](#MachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|CreateOrUpdate#Create#ComputeInstance|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|
+|[az machinelearningservices machine-learning-compute data-factory create](#MachineLearningComputeCreateOrUpdate#Create#DataFactory)|CreateOrUpdate#Create#DataFactory|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataFactory)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataFactory)|
+|[az machinelearningservices machine-learning-compute data-lake-analytics create](#MachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|CreateOrUpdate#Create#DataLakeAnalytics|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|
+|[az machinelearningservices machine-learning-compute databricks create](#MachineLearningComputeCreateOrUpdate#Create#Databricks)|CreateOrUpdate#Create#Databricks|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#Databricks)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#Databricks)|
+|[az machinelearningservices machine-learning-compute hd-insight create](#MachineLearningComputeCreateOrUpdate#Create#HDInsight)|CreateOrUpdate#Create#HDInsight|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#HDInsight)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#HDInsight)|
+|[az machinelearningservices machine-learning-compute virtual-machine create](#MachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|CreateOrUpdate#Create#VirtualMachine|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|
+|[az machinelearningservices machine-learning-compute update](#MachineLearningComputeUpdate)|Update|[Parameters](#ParametersMachineLearningComputeUpdate)|[Example](#ExamplesMachineLearningComputeUpdate)|
+|[az machinelearningservices machine-learning-compute delete](#MachineLearningComputeDelete)|Delete|[Parameters](#ParametersMachineLearningComputeDelete)|[Example](#ExamplesMachineLearningComputeDelete)|
+|[az machinelearningservices machine-learning-compute list-key](#MachineLearningComputeListKeys)|ListKeys|[Parameters](#ParametersMachineLearningComputeListKeys)|[Example](#ExamplesMachineLearningComputeListKeys)|
+|[az machinelearningservices machine-learning-compute list-node](#MachineLearningComputeListNodes)|ListNodes|[Parameters](#ParametersMachineLearningComputeListNodes)|[Example](#ExamplesMachineLearningComputeListNodes)|
+|[az machinelearningservices machine-learning-compute restart](#MachineLearningComputeRestart)|Restart|[Parameters](#ParametersMachineLearningComputeRestart)|[Example](#ExamplesMachineLearningComputeRestart)|
+|[az machinelearningservices machine-learning-compute start](#MachineLearningComputeStart)|Start|[Parameters](#ParametersMachineLearningComputeStart)|[Example](#ExamplesMachineLearningComputeStart)|
+|[az machinelearningservices machine-learning-compute stop](#MachineLearningComputeStop)|Stop|[Parameters](#ParametersMachineLearningComputeStop)|[Example](#ExamplesMachineLearningComputeStop)|
+
+### Commands in `az machinelearningservices machine-learning-service` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices machine-learning-service list](#MachineLearningServiceListByWorkspace)|ListByWorkspace|[Parameters](#ParametersMachineLearningServiceListByWorkspace)|[Example](#ExamplesMachineLearningServiceListByWorkspace)|
+|[az machinelearningservices machine-learning-service show](#MachineLearningServiceGet)|Get|[Parameters](#ParametersMachineLearningServiceGet)|[Example](#ExamplesMachineLearningServiceGet)|
+|[az machinelearningservices machine-learning-service create](#MachineLearningServiceCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersMachineLearningServiceCreateOrUpdate#Create)|[Example](#ExamplesMachineLearningServiceCreateOrUpdate#Create)|
+|[az machinelearningservices machine-learning-service update](#MachineLearningServiceCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersMachineLearningServiceCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices machine-learning-service delete](#MachineLearningServiceDelete)|Delete|[Parameters](#ParametersMachineLearningServiceDelete)|[Example](#ExamplesMachineLearningServiceDelete)|
+
+### Commands in `az machinelearningservices model-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices model-container list](#ModelContainersList)|List|[Parameters](#ParametersModelContainersList)|[Example](#ExamplesModelContainersList)|
+|[az machinelearningservices model-container show](#ModelContainersGet)|Get|[Parameters](#ParametersModelContainersGet)|[Example](#ExamplesModelContainersGet)|
+|[az machinelearningservices model-container create](#ModelContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersModelContainersCreateOrUpdate#Create)|[Example](#ExamplesModelContainersCreateOrUpdate#Create)|
+|[az machinelearningservices model-container update](#ModelContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersModelContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices model-container delete](#ModelContainersDelete)|Delete|[Parameters](#ParametersModelContainersDelete)|[Example](#ExamplesModelContainersDelete)|
+
+### Commands in `az machinelearningservices model-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices model-version list](#ModelVersionsList)|List|[Parameters](#ParametersModelVersionsList)|[Example](#ExamplesModelVersionsList)|
+|[az machinelearningservices model-version show](#ModelVersionsGet)|Get|[Parameters](#ParametersModelVersionsGet)|[Example](#ExamplesModelVersionsGet)|
+|[az machinelearningservices model-version create](#ModelVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersModelVersionsCreateOrUpdate#Create)|[Example](#ExamplesModelVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices model-version update](#ModelVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersModelVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices model-version delete](#ModelVersionsDelete)|Delete|[Parameters](#ParametersModelVersionsDelete)|[Example](#ExamplesModelVersionsDelete)|
+
+### Commands in `az machinelearningservices notebook` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices notebook list-key](#NotebooksListKeys)|ListKeys|[Parameters](#ParametersNotebooksListKeys)|[Example](#ExamplesNotebooksListKeys)|
+|[az machinelearningservices notebook prepare](#NotebooksPrepare)|Prepare|[Parameters](#ParametersNotebooksPrepare)|[Example](#ExamplesNotebooksPrepare)|
+
+### Commands in `az machinelearningservices online-deployment` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices online-deployment list](#OnlineDeploymentsList)|List|[Parameters](#ParametersOnlineDeploymentsList)|[Example](#ExamplesOnlineDeploymentsList)|
+|[az machinelearningservices online-deployment show](#OnlineDeploymentsGet)|Get|[Parameters](#ParametersOnlineDeploymentsGet)|[Example](#ExamplesOnlineDeploymentsGet)|
+|[az machinelearningservices online-deployment create](#OnlineDeploymentsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersOnlineDeploymentsCreateOrUpdate#Create)|[Example](#ExamplesOnlineDeploymentsCreateOrUpdate#Create)|
+|[az machinelearningservices online-deployment update](#OnlineDeploymentsUpdate)|Update|[Parameters](#ParametersOnlineDeploymentsUpdate)|[Example](#ExamplesOnlineDeploymentsUpdate)|
+|[az machinelearningservices online-deployment delete](#OnlineDeploymentsDelete)|Delete|[Parameters](#ParametersOnlineDeploymentsDelete)|[Example](#ExamplesOnlineDeploymentsDelete)|
+|[az machinelearningservices online-deployment get-log](#OnlineDeploymentsGetLogs)|GetLogs|[Parameters](#ParametersOnlineDeploymentsGetLogs)|[Example](#ExamplesOnlineDeploymentsGetLogs)|
+
+### Commands in `az machinelearningservices online-endpoint` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices online-endpoint list](#OnlineEndpointsList)|List|[Parameters](#ParametersOnlineEndpointsList)|[Example](#ExamplesOnlineEndpointsList)|
+|[az machinelearningservices online-endpoint show](#OnlineEndpointsGet)|Get|[Parameters](#ParametersOnlineEndpointsGet)|[Example](#ExamplesOnlineEndpointsGet)|
+|[az machinelearningservices online-endpoint create](#OnlineEndpointsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersOnlineEndpointsCreateOrUpdate#Create)|[Example](#ExamplesOnlineEndpointsCreateOrUpdate#Create)|
+|[az machinelearningservices online-endpoint update](#OnlineEndpointsUpdate)|Update|[Parameters](#ParametersOnlineEndpointsUpdate)|[Example](#ExamplesOnlineEndpointsUpdate)|
+|[az machinelearningservices online-endpoint delete](#OnlineEndpointsDelete)|Delete|[Parameters](#ParametersOnlineEndpointsDelete)|[Example](#ExamplesOnlineEndpointsDelete)|
+|[az machinelearningservices online-endpoint get-token](#OnlineEndpointsGetToken)|GetToken|[Parameters](#ParametersOnlineEndpointsGetToken)|[Example](#ExamplesOnlineEndpointsGetToken)|
+|[az machinelearningservices online-endpoint list-key](#OnlineEndpointsListKeys)|ListKeys|[Parameters](#ParametersOnlineEndpointsListKeys)|[Example](#ExamplesOnlineEndpointsListKeys)|
+|[az machinelearningservices online-endpoint regenerate-key](#OnlineEndpointsRegenerateKeys)|RegenerateKeys|[Parameters](#ParametersOnlineEndpointsRegenerateKeys)|[Example](#ExamplesOnlineEndpointsRegenerateKeys)|
+
+### Commands in `az machinelearningservices private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-endpoint-connection show](#PrivateEndpointConnectionsGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionsGet)|[Example](#ExamplesPrivateEndpointConnectionsGet)|
+|[az machinelearningservices private-endpoint-connection delete](#PrivateEndpointConnectionsDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionsDelete)|[Example](#ExamplesPrivateEndpointConnectionsDelete)|
+|[az machinelearningservices private-endpoint-connection put](#PrivateEndpointConnectionsPut)|Put|[Parameters](#ParametersPrivateEndpointConnectionsPut)|[Example](#ExamplesPrivateEndpointConnectionsPut)|
+
+### Commands in `az machinelearningservices private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-link-resource list](#PrivateLinkResourcesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersPrivateLinkResourcesListByWorkspace)|[Example](#ExamplesPrivateLinkResourcesListByWorkspace)|
+
+### Commands in `az machinelearningservices quota` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices quota list](#QuotasList)|List|[Parameters](#ParametersQuotasList)|[Example](#ExamplesQuotasList)|
+|[az machinelearningservices quota update](#QuotasUpdate)|Update|[Parameters](#ParametersQuotasUpdate)|[Example](#ExamplesQuotasUpdate)|
+
+### Commands in `az machinelearningservices usage` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)|
+
+### Commands in `az machinelearningservices virtual-machine-size` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices virtual-machine-size list](#VirtualMachineSizesList)|List|[Parameters](#ParametersVirtualMachineSizesList)|[Example](#ExamplesVirtualMachineSizesList)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)|
+|[az machinelearningservices workspace list](#WorkspacesListBySubscription)|ListBySubscription|[Parameters](#ParametersWorkspacesListBySubscription)|[Example](#ExamplesWorkspacesListBySubscription)|
+|[az machinelearningservices workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)|
+|[az machinelearningservices workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)|
+|[az machinelearningservices workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)|
+|[az machinelearningservices workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)|
+|[az machinelearningservices workspace list-key](#WorkspacesListKeys)|ListKeys|[Parameters](#ParametersWorkspacesListKeys)|[Example](#ExamplesWorkspacesListKeys)|
+|[az machinelearningservices workspace resync-key](#WorkspacesResyncKeys)|ResyncKeys|[Parameters](#ParametersWorkspacesResyncKeys)|[Example](#ExamplesWorkspacesResyncKeys)|
+
+### Commands in `az machinelearningservices workspace-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-connection list](#WorkspaceConnectionsList)|List|[Parameters](#ParametersWorkspaceConnectionsList)|[Example](#ExamplesWorkspaceConnectionsList)|
+|[az machinelearningservices workspace-connection show](#WorkspaceConnectionsGet)|Get|[Parameters](#ParametersWorkspaceConnectionsGet)|[Example](#ExamplesWorkspaceConnectionsGet)|
+|[az machinelearningservices workspace-connection create](#WorkspaceConnectionsCreate)|Create|[Parameters](#ParametersWorkspaceConnectionsCreate)|[Example](#ExamplesWorkspaceConnectionsCreate)|
+|[az machinelearningservices workspace-connection delete](#WorkspaceConnectionsDelete)|Delete|[Parameters](#ParametersWorkspaceConnectionsDelete)|[Example](#ExamplesWorkspaceConnectionsDelete)|
+
+### Commands in `az machinelearningservices workspace-feature` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-feature list](#WorkspaceFeaturesList)|List|[Parameters](#ParametersWorkspaceFeaturesList)|[Example](#ExamplesWorkspaceFeaturesList)|
+
+
+## COMMAND DETAILS
+
+### group `az machinelearningservices`
+#### Command `az machinelearningservices list-sku`
+
+##### Example
+```
+az machinelearningservices list-sku
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+### group `az machinelearningservices code-container`
+#### Command `az machinelearningservices code-container list`
+
+##### Example
+```
+az machinelearningservices code-container list --skiptoken "skiptoken" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices code-container show`
+
+##### Example
+```
+az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices code-container create`
+
+##### Example
+```
+az machinelearningservices code-container create --name "testContainer" --properties description="string" \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string key-value pairs.|properties|properties|
+|**--tags**|dictionary|Dictionary of string key-value pairs.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices code-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string key-value pairs.|properties|properties|
+|**--tags**|dictionary|Dictionary of string key-value pairs.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices code-container delete`
+
+##### Example
+```
+az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices code-version`
+#### Command `az machinelearningservices code-version list`
+
+##### Example
+```
+az machinelearningservices code-version list --name "testContainer" --skiptoken "skiptoken" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices code-version show`
+
+##### Example
+```
+az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices code-version create`
+
+##### Example
+```
+az machinelearningservices code-version create --name "testContainer" --properties description="string" \
+assetPath={"path":"string","isDirectory":true} datastoreId="string" properties={"prop1":"value1","prop2":"value2"} \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--datastore-id**|string|The asset datastoreId|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices code-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--datastore-id**|string|The asset datastoreId|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices code-version delete`
+
+##### Example
+```
+az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices component-container`
+#### Command `az machinelearningservices component-container list`
+
+##### Example
+```
+az machinelearningservices component-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices component-container show`
+
+##### Example
+```
+az machinelearningservices component-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices component-container create`
+
+##### Example
+```
+az machinelearningservices component-container create --name "testContainer" --properties description="string" \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices component-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices component-container delete`
+
+##### Example
+```
+az machinelearningservices component-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices component-version`
+#### Command `az machinelearningservices component-version list`
+
+##### Example
+```
+az machinelearningservices component-version list --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices component-version show`
+
+##### Example
+```
+az machinelearningservices component-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices component-version create`
+
+##### Example
+```
+az machinelearningservices component-version create --name "testContainer" --properties description="string" \
+codeConfiguration={"codeArtifactId":"string","command":"string"} component={"componentType":"CommandComponent","display\
+Name":"string","inputs":{"additionalProp1":{"description":"string","default":"string","componentInputType":"Generic","d\
+ataType":"string","optional":true},"additionalProp2":{"description":"string","default":"string","componentInputType":"G\
+eneric","dataType":"string","optional":true},"additionalProp3":{"description":"string","default":"string","componentInp\
+utType":"Generic","dataType":"string","optional":true}},"isDeterministic":true,"outputs":{"additionalProp1":{"descripti\
+on":"string","dataType":"string"},"additionalProp2":{"description":"string","dataType":"string"},"additionalProp3":{"de\
+scription":"string","dataType":"string"}}} environmentId="\\"/subscriptions/{{subscriptionId}}/resourceGroups/{{resourc\
+eGroup}}/providers/Microsoft.MachineLearningServices/workspaces/{{workspaceName}}/Environments/AzureML-Minimal\\"" \
+generatedBy="User" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--code-configuration**|object|Code configuration of the job. Includes CodeArtifactId and Command.|code_configuration|codeConfiguration|
+|**--environment-id**|string|Environment configuration of the component.|environment_id|environmentId|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--display-name**|string|DisplayName of the component on the UI. Defaults to same as name.|display_name|displayName|
+|**--is-deterministic**|boolean|Whether or not it is deterministic. Defaults to true.|is_deterministic|isDeterministic|
+|**--inputs**|dictionary|Defines input ports of the component. The string key is the name of input, which should be a valid Python variable name.|inputs|inputs|
+|**--outputs**|dictionary|Defines output ports of the component. The string key is the name of Output, which should be a valid Python variable name.|outputs|outputs|
+
+#### Command `az machinelearningservices component-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--code-configuration**|object|Code configuration of the job. Includes CodeArtifactId and Command.|code_configuration|codeConfiguration|
+|**--environment-id**|string|Environment configuration of the component.|environment_id|environmentId|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--display-name**|string|DisplayName of the component on the UI. Defaults to same as name.|display_name|displayName|
+|**--is-deterministic**|boolean|Whether or not it is deterministic. Defaults to true.|is_deterministic|isDeterministic|
+|**--inputs**|dictionary|Defines input ports of the component. The string key is the name of input, which should be a valid Python variable name.|inputs|inputs|
+|**--outputs**|dictionary|Defines output ports of the component. The string key is the name of Output, which should be a valid Python variable name.|outputs|outputs|
+
+#### Command `az machinelearningservices component-version delete`
+
+##### Example
+```
+az machinelearningservices component-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices data-container`
+#### Command `az machinelearningservices data-container list`
+
+##### Example
+```
+az machinelearningservices data-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices data-container show`
+
+##### Example
+```
+az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices data-container create`
+
+##### Example
+```
+az machinelearningservices data-container create --name "datacontainer123" --properties description="string" \
+properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} --resource-group \
+"testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string key-value pairs.|properties|properties|
+|**--tags**|dictionary|Dictionary of string key-value pairs.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices data-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string key-value pairs.|properties|properties|
+|**--tags**|dictionary|Dictionary of string key-value pairs.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices data-container delete`
+
+##### Example
+```
+az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices data-version`
+#### Command `az machinelearningservices data-version list`
+
+##### Example
+```
+az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices data-version show`
+
+##### Example
+```
+az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version "456" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices data-version create`
+
+##### Example
+```
+az machinelearningservices data-version create --name "dataset123" --properties description="string" \
+assetPath={"path":"string","isDirectory":false} datasetType="Simple" datastoreId="string" \
+properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} --resource-group \
+"testrg123" --version "456" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--dataset-type**|choice|The Format of dataset.|dataset_type|datasetType|
+|**--datastore-id**|string|The asset datastoreId|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices data-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--dataset-type**|choice|The Format of dataset.|dataset_type|datasetType|
+|**--datastore-id**|string|The asset datastoreId|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|If the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices data-version delete`
+
+##### Example
+```
+az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" --version "456" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices datastore`
+#### Command `az machinelearningservices datastore list`
+
+##### Example
+```
+az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--count**|integer|Maximum number of results to return.|count|count|
+|**--is-default**|boolean|Filter down to the workspace default datastore.|is_default|isDefault|
+|**--names**|array|Names of datastores to return.|names|names|
+|**--search-text**|string|Text to search for in the datastore names.|search_text|searchText|
+|**--order-by**|string|Order by property (createdtime \| modifiedtime \| name).|order_by|orderBy|
+|**--order-by-asc**|boolean|Order by property in ascending order.|order_by_asc|orderByAsc|
+
+#### Command `az machinelearningservices datastore show`
+
+##### Example
+```
+az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices datastore create`
+
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"azureDataLake":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certifi\
+cate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-\
+b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"serviceP\
+rincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceU\
+ri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"st\
+oreName":"string"},"azureMySql":{"credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","c\
+ertificate":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717\
+-4562-b3fc-2c963f66afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"se\
+rvicePrincipal":{"authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","res\
+ourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"\
+}},"databaseName":"string","endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azurePostgreSql":{"\
+credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3\
+fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbpri\
+nt":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"s\
+tring","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa\
+85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","enable\
+SSL":true,"endpoint":"database.windows.net","portNumber":0,"serverName":"string"},"azureSqlDatabase":{"credentials":{"a\
+ccountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"string","clientId":"3fa85f64-5717-456\
+2-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","thumbprint":"string"},"d\
+atastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"authorityUrl":"string","clientId\
+":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string","tenantId":"3fa85f64-5717-4562-\
+b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"databaseName":"string","endpoint":"database.wi\
+ndows.net","portNumber":0,"serverName":"string"},"azureStorage":{"accountName":"string","blobCacheTimeout":0,"container\
+Name":"string","credentials":{"accountKey":{"key":"string"},"certificate":{"authorityUrl":"string","certificate":"strin\
+g","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","resourceUri":"string","tenantId":"3fa85f64-5717-4562-b3fc-2c963f6\
+6afa6","thumbprint":"string"},"datastoreCredentialsType":"AccountKey","sas":{"sasToken":"string"},"servicePrincipal":{"\
+authorityUrl":"string","clientId":"3fa85f64-5717-4562-b3fc-2c963f66afa6","clientSecret":"string","resourceUri":"string"\
+,"tenantId":"3fa85f64-5717-4562-b3fc-2c963f66afa6"},"sqlAdmin":{"password":"string","userId":"string"}},"endpoint":"cor\
+e.windows.net","protocol":"https"},"datastoreContentsType":"AzureBlob","glusterFs":{"serverAddress":"string","volumeNam\
+e":"string"}} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--datastore-contents-type**|choice|Storage type backing the datastore.|datastore_contents_type|datastoreContentsType|
+|**--is-default**|boolean|Whether this datastore is the default for the workspace.|is_default|isDefault|
+|**--linked-info**|object|Information about the datastore origin, if linked.|linked_info|linkedInfo|
+|**--properties**|dictionary|Dictionary of string properties.|properties|properties|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--azure-data-lake**|object|Azure Data Lake (Gen1/2) storage information.|azure_data_lake|azureDataLake|
+|**--azure-my-sql**|object|Azure Database for MySQL information.|azure_my_sql|azureMySql|
+|**--azure-postgre-sql**|object|Azure Database for PostgreSQL information.|azure_postgre_sql|azurePostgreSql|
+|**--azure-sql-database**|object|Azure SQL Database information.|azure_sql_database|azureSqlDatabase|
+|**--azure-storage**|object|Azure storage account (blobs, files) information.|azure_storage|azureStorage|
+|**--gluster-fs**|object|GlusterFS volume information.|gluster_fs|glusterFs|
+
+#### Command `az machinelearningservices datastore update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--datastore-contents-type**|choice|Storage type backing the datastore.|datastore_contents_type|datastoreContentsType|
+|**--is-default**|boolean|Whether this datastore is the default for the workspace.|is_default|isDefault|
+|**--linked-info**|object|Information about the datastore origin, if linked.|linked_info|linkedInfo|
+|**--properties**|dictionary|Dictionary of string properties.|properties|properties|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--azure-data-lake**|object|Azure Data Lake (Gen1/2) storage information.|azure_data_lake|azureDataLake|
+|**--azure-my-sql**|object|Azure Database for MySQL information.|azure_my_sql|azureMySql|
+|**--azure-postgre-sql**|object|Azure Database for PostgreSQL information.|azure_postgre_sql|azurePostgreSql|
+|**--azure-sql-database**|object|Azure SQL Database information.|azure_sql_database|azureSqlDatabase|
+|**--azure-storage**|object|Azure storage account (blobs, files) information.|azure_storage|azureStorage|
+|**--gluster-fs**|object|GlusterFS volume information.|gluster_fs|glusterFs|
+
+#### Command `az machinelearningservices datastore delete`
+
+##### Example
+```
+az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices datastore list-secret`
+
+##### Example
+```
+az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices environment-container`
+#### Command `az machinelearningservices environment-container list`
+
+##### Example
+```
+az machinelearningservices environment-container list --skiptoken "skiptoken" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices environment-container show`
+
+##### Example
+```
+az machinelearningservices environment-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices environment-container create`
+
+##### Example
+```
+az machinelearningservices environment-container create --name "testContainer" --properties description="string" \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string properties.|properties|properties|
+|**--tags**|dictionary|Dictionary of string tags.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices environment-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|dictionary|Dictionary of string properties.|properties|properties|
+|**--tags**|dictionary|Dictionary of string tags.|tags|tags|
+|**--description**|string|The asset description text.|description|description|
+
+#### Command `az machinelearningservices environment-container delete`
+
+##### Example
+```
+az machinelearningservices environment-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices environment-specification-version`
+#### Command `az machinelearningservices environment-specification-version list`
+
+##### Example
+```
+az machinelearningservices environment-specification-version list --name "testContainer" --skiptoken "skiptoken" \
+--resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices environment-specification-version show`
+
+##### Example
+```
+az machinelearningservices environment-specification-version show --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices environment-specification-version create`
+
+##### Example
+```
+az machinelearningservices environment-specification-version create --name "testContainer" --properties \
+description="string" condaFile="string" docker={"dockerSpecificationType":"Build","dockerfile":"string"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Name of EnvironmentSpecificationVersion.|name|name|
+|**--version**|string|Version of EnvironmentSpecificationVersion.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--docker-image**|object|Class to represent configuration settings for Docker Image|docker_image|DockerImage|
+|**--docker-build**|object|Class to represent configuration settings for Docker Build|docker_build|DockerBuild|
+|**--conda-file**|string|Standard configuration file used by conda that lets you install any kind of package, including Python, R, and C/C++ packages |conda_file|condaFile|
+|**--generated-by**|choice|If the name version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--liveness-route**|object|The route to check the liveness of the inference server container.|liveness_route|livenessRoute|
+|**--readiness-route**|object|The route to check the readiness of the inference server container.|readiness_route|readinessRoute|
+|**--scoring-route**|object|The port to send the scoring requests to, within the inference server container.|scoring_route|scoringRoute|
+
+#### Command `az machinelearningservices environment-specification-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Name of EnvironmentSpecificationVersion.|name|name|
+|**--version**|string|Version of EnvironmentSpecificationVersion.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--docker-image**|object|Class to represent configuration settings for Docker Image|docker_image|DockerImage|
+|**--docker-build**|object|Class to represent configuration settings for Docker Build|docker_build|DockerBuild|
+|**--conda-file**|string|Standard configuration file used by conda that lets you install any kind of package, including Python, R, and C/C++ packages |conda_file|condaFile|
+|**--generated-by**|choice|If the name version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--liveness-route**|object|The route to check the liveness of the inference server container.|liveness_route|livenessRoute|
+|**--readiness-route**|object|The route to check the readiness of the inference server container.|readiness_route|readinessRoute|
+|**--scoring-route**|object|The port to send the scoring requests to, within the inference server container.|scoring_route|scoringRoute|
+
+#### Command `az machinelearningservices environment-specification-version delete`
+
+##### Example
+```
+az machinelearningservices environment-specification-version delete --name "testContainer" --resource-group \
+"testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices job`
+#### Command `az machinelearningservices job list`
+
+##### Example
+```
+az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Example
+```
+az machinelearningservices job list --skiptoken "skiptoken" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--job-type**|string|Type of job to be returned.|job_type|jobType|
+|**--tags**|string|Tags for job to be returned.|tags|tags|
+|**--tag**|string|Jobs returned will have this tag key.|tag|tag|
+
+#### Command `az machinelearningservices job show`
+
+##### Example
+```
+az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices job show --id "testContainer" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices job create`
+
+##### Example
+```
+az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\"additionalProp\
+1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\
+\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" --id "testContainer" \
+--resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"properties\\":{\\"additionalProp\
+1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\
+\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}}" --id "testContainer" \
+--resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|object|Job base definition|properties|properties|
+
+#### Command `az machinelearningservices job update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|object|Job base definition|properties|properties|
+
+#### Command `az machinelearningservices job delete`
+
+##### Example
+```
+az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Example
+```
+az machinelearningservices job delete --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices job cancel`
+
+##### Example
+```
+az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Example
+```
+az machinelearningservices job cancel --id "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices labeling-job`
+#### Command `az machinelearningservices labeling-job list`
+
+##### Example
+```
+az machinelearningservices labeling-job list --skiptoken "skiptoken" --count "10" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--count**|integer|Number of labeling jobs to return.|count|count|
+
+#### Command `az machinelearningservices labeling-job show`
+
+##### Example
+```
+az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--include-job-instructions**|boolean|Boolean value to indicate whether to include JobInstructions in response.|include_job_instructions|includeJobInstructions|
+|**--include-label-categories**|boolean|Boolean value to indicate whether to include LabelCategories in response.|include_label_categories|includeLabelCategories|
+
+#### Command `az machinelearningservices labeling-job create`
+
+##### Example
+```
+az machinelearningservices labeling-job create --properties description="string" datasetConfiguration={"assetName":"str\
+ing","datasetVersion":"string","incrementalDatasetRefreshEnabled":true} jobInstructions={"uri":"string"} \
+jobType="Labeling" labelCategories={"additionalProp1":{"allowMultiSelect":true,"classes":{"additionalProp1":{"displayNa\
+me":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{}},"additionalProp3":{"displayNam\
+e":"string","subclasses":{}}},"displayName":"string"},"additionalProp2":{"allowMultiSelect":true,"classes":{"additional\
+Prop1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses":{}},"additionalP\
+rop3":{"displayName":"string","subclasses":{}}},"displayName":"string"},"additionalProp3":{"allowMultiSelect":true,"cla\
+sses":{"additionalProp1":{"displayName":"string","subclasses":{}},"additionalProp2":{"displayName":"string","subclasses\
+":{}},"additionalProp3":{"displayName":"string","subclasses":{}}},"displayName":"string"}} \
+labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingComputeBinding":{"computeId":"strin\
+g","nodeCount":0},"mlAssistEnabled":true,"trainingComputeBinding":{"computeId":"string","nodeCount":0}} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --id "testLabelingJob" \
+--resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--label-categories**|dictionary|Label categories of the job.|label_categories|labelCategories|
+|**--dataset-configuration**|object|Configuration of dataset used in the job.|dataset_configuration|datasetConfiguration|
+|**--labeling-job-image-properties**|object|Properties of a labeling job for image data|labeling_job_image_properties|LabelingJobImageProperties|
+|**--labeling-job-text-properties**|object|Properties of a labeling job for text data|labeling_job_text_properties|LabelingJobTextProperties|
+|**--inferencing-compute-binding**|object|AML compute binding used in inferencing.|inferencing_compute_binding|inferencingComputeBinding|
+|**--training-compute-binding**|object|AML compute binding used in training.|training_compute_binding|trainingComputeBinding|
+|**--ml-assist-enabled**|boolean|Indicates whether MLAssist feature is enabled.|ml_assist_enabled|mlAssistEnabled|
+|**--uri**|string|The link to a page with detailed labeling instructions for labelers.|uri|uri|
+
+#### Command `az machinelearningservices labeling-job update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--label-categories**|dictionary|Label categories of the job.|label_categories|labelCategories|
+|**--dataset-configuration**|object|Configuration of dataset used in the job.|dataset_configuration|datasetConfiguration|
+|**--labeling-job-image-properties**|object|Properties of a labeling job for image data|labeling_job_image_properties|LabelingJobImageProperties|
+|**--labeling-job-text-properties**|object|Properties of a labeling job for text data|labeling_job_text_properties|LabelingJobTextProperties|
+|**--inferencing-compute-binding**|object|AML compute binding used in inferencing.|inferencing_compute_binding|inferencingComputeBinding|
+|**--training-compute-binding**|object|AML compute binding used in training.|training_compute_binding|trainingComputeBinding|
+|**--ml-assist-enabled**|boolean|Indicates whether MLAssist feature is enabled.|ml_assist_enabled|mlAssistEnabled|
+|**--uri**|string|The link to a page with detailed labeling instructions for labelers.|uri|uri|
+
+#### Command `az machinelearningservices labeling-job delete`
+
+##### Example
+```
+az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices labeling-job export-label`
+
+##### Example
+```
+az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--coco-export-summary**|object|Summary of an export in COCO format.|coco_export_summary|CocoExportSummary|
+|**--csv-export-summary**|object|Summary of an export in CSV format.|csv_export_summary|CsvExportSummary|
+|**--dataset-export-summary**|object|Summary of an export to a dataset.|dataset_export_summary|DatasetExportSummary|
+
+#### Command `az machinelearningservices labeling-job pause`
+
+##### Example
+```
+az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices labeling-job resume`
+
+##### Example
+```
+az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices linked-service`
+#### Command `az machinelearningservices linked-service list`
+
+##### Example
+```
+az machinelearningservices linked-service list --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices linked-service show`
+
+##### Example
+```
+az machinelearningservices linked-service show --link-name "link-1" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--link-name**|string|Friendly name of the linked workspace.|link_name|linkName|
+
+#### Command `az machinelearningservices linked-service create`
+
+##### Example
+```
+az machinelearningservices linked-service create --link-name "link-1" --name "link-1" --type "SystemAssigned" \
+--location "westus" --properties linked-service-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resour\
+ceGroups/resourceGroup-1/providers/Microsoft.Synapse/workspaces/Syn-1" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--link-name**|string|Friendly name of the linked workspace.|link_name|linkName|
+|**--name**|string|Friendly name of the linked service.|name|name|
+|**--location**|string|Location of the linked service.|location|location|
+|**--properties**|object|LinkedService specific properties.|properties|properties|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices linked-service delete`
+
+##### Example
+```
+az machinelearningservices linked-service delete --link-name "link-1" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--link-name**|string|Friendly name of the linked workspace.|link_name|linkName|
+
+### group `az machinelearningservices machine-learning-compute`
+#### Command `az machinelearningservices machine-learning-compute list`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices machine-learning-compute show`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute aks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLogin\
+PortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBe\
+foreScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000\
+/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/\
+0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\"\
+,\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\
+\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\\
+"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--ak-s-compute-location**|string|Location for the underlying compute|ak_s_compute_location|computeLocation|
+|**--ak-s-description**|string|The description of the Machine Learning compute.|ak_s_description|description|
+|**--ak-s-resource-id**|string|ARM resource id of the underlying compute|ak_s_resource_id|resourceId|
+|**--ak-s-properties**|object|AKS properties|ak_s_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute aml-compute create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remo\
+teLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdl\
+eTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-00000\
+0000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/ve\
+rsions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"pers\
+onal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-0000000000\
+00\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\
+\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|aml_compute_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|aml_compute_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|aml_compute_resource_id|resourceId|
+|**--aml-compute-properties**|object|AML Compute properties|aml_compute_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute compute-instance create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Wind\
+ows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\\
+":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0\
+000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImag\
+eDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationT\
+ype\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0\
+000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\\
+":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|compute_instance_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|compute_instance_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|compute_instance_resource_id|resourceId|
+|**--compute-instance-properties**|object|Compute Instance properties|compute_instance_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute data-factory create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_factory_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_factory_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_factory_resource_id|resourceId|
+
+#### Command `az machinelearningservices machine-learning-compute data-lake-analytics create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_lake_analytics_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_lake_analytics_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_lake_analytics_resource_id|resourceId|
+|**--data-lake-store-account-name**|string|DataLake Store Account Name|data_lake_analytics_data_lake_store_account_name|dataLakeStoreAccountName|
+
+#### Command `az machinelearningservices machine-learning-compute databricks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|databricks_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|databricks_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|databricks_resource_id|resourceId|
+|**--databricks-access-token**|string|Databricks access token|databricks_databricks_access_token|databricksAccessToken|
+
+#### Command `az machinelearningservices machine-learning-compute hd-insight create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|hd_insight_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|hd_insight_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|hd_insight_resource_id|resourceId|
+|**--ssh-port**|integer|Port open for ssh connections on the master node of the cluster.|hd_insight_ssh_port|sshPort|
+|**--address**|string|Public IP address of the master node of the cluster.|hd_insight_address|address|
+|**--administrator-account**|object|Admin credentials for master node of the cluster|hd_insight_administrator_account|administratorAccount|
+
+#### Command `az machinelearningservices machine-learning-compute virtual-machine create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|virtual_machine_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|virtual_machine_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|virtual_machine_resource_id|resourceId|
+|**--virtual-machine-size**|string|Virtual Machine size|virtual_machine_virtual_machine_size|virtualMachineSize|
+|**--ssh-port**|integer|Port open for ssh connections.|virtual_machine_ssh_port|sshPort|
+|**--address**|string|Public IP address of the virtual machine.|virtual_machine_address|address|
+|**--administrator-account**|object|Admin credentials for virtual machine|virtual_machine_administrator_account|administratorAccount|
+
+#### Command `az machinelearningservices machine-learning-compute update`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--scale-settings**|object|Desired scale settings for the amlCompute.|scale_settings|scaleSettings|
+
+#### Command `az machinelearningservices machine-learning-compute delete`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction|
+
+#### Command `az machinelearningservices machine-learning-compute list-key`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute list-node`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute restart`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute start`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute stop`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+### group `az machinelearningservices machine-learning-service`
+#### Command `az machinelearningservices machine-learning-service list`
+
+##### Example
+```
+az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--model-id**|string|The Model Id.|model_id|modelId|
+|**--model-name**|string|The Model name.|model_name|modelName|
+|**--tag**|string|The object tag.|tag|tag|
+|**--tags**|string|A set of tags with which to filter the returned services. It is a comma separated string of tags key or tags key=value Example: tagKey1,tagKey2,tagKey3=value3 .|tags|tags|
+|**--properties**|string|A set of properties with which to filter the returned services. It is a comma separated string of properties key and/or properties key=value Example: propKey1,propKey2,propKey3=value3 .|properties|properties|
+|**--run-id**|string|runId for model associated with service.|run_id|runId|
+|**--expand**|boolean|Set to True to include Model details.|expand|expand|
+|**--orderby**|choice|The option to order the response.|orderby|orderby|
+
+#### Command `az machinelearningservices machine-learning-service show`
+
+##### Example
+```
+az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name "service123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--expand**|boolean|Set to True to include Model details.|expand|expand|
+
+#### Command `az machinelearningservices machine-learning-service create`
+
+##### Example
+```
+az machinelearningservices machine-learning-service create --properties "{\\"appInsightsEnabled\\":true,\\"authEnabled\
+\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1},\\"environmen\
+tImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\
+\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\"AzureML-Scik\
+it-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/base:openmpi3.1\
+.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\"environmentV\
+ariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\\"baseCondaEn\
+vironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\\
+"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0\
+.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\
+\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpreterPath\\":\\\
+"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":true,\\"repositor\
+ies\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeType\\":\\"applic\
+ation/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\"eastus2\\"}" \
+--resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--properties**|object|The payload that is used to create or update the Service.|properties|properties|
+
+#### Command `az machinelearningservices machine-learning-service update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--properties**|object|The payload that is used to create or update the Service.|properties|properties|
+
+#### Command `az machinelearningservices machine-learning-service delete`
+
+##### Example
+```
+az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name "service123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+
+### group `az machinelearningservices model-container`
+#### Command `az machinelearningservices model-container list`
+
+##### Example
+```
+az machinelearningservices model-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--count**|integer|Maximum number of results to return.|count|count|
+
+#### Command `az machinelearningservices model-container show`
+
+##### Example
+```
+az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices model-container create`
+
+##### Example
+```
+az machinelearningservices model-container create --name "testContainer" --properties description="Model container \
+description" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices model-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices model-container delete`
+
+##### Example
+```
+az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices model-version`
+#### Command `az machinelearningservices model-version list`
+
+##### Example
+```
+az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" --version "999" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Model name.|name|name|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--version**|string|Model version.|version|version|
+|**--description**|string|Model description.|description|description|
+|**--offset**|integer|Number of initial results to skip.|offset|offset|
+|**--tags**|string|Comma-separated list of tag names (and optionally values). Example: tag1,tag2=value2|tags|tags|
+|**--properties**|string|Comma-separated list of property names (and optionally values). Example: prop1,prop2=value2|properties|properties|
+
+#### Command `az machinelearningservices model-version show`
+
+##### Example
+```
+az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" --version "999" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices model-version create`
+
+##### Example
+```
+az machinelearningservices model-version create --name "testContainer" --properties description="Model version \
+description" assetPath={"path":"LocalUpload/12345/some/path","isDirectory":true} datastoreId="/subscriptions/00000000-1\
+111-2222-3333-444444444444/resourceGroups/testrg123/providers/Microsoft.MachineLearningServices/workspaces/workspace123\
+/datastores/datastore123" properties={"prop1":"value1","prop2":"value2"} stage="Production" \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "999" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--stage**|string|Model asset stage.|stage|stage|
+|**--flavors**|dictionary|Dictionary mapping model flavors to their properties.|flavors|flavors|
+|**--datastore-id**|string|The asset datastoreId.|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|Whether the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices model-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--stage**|string|Model asset stage.|stage|stage|
+|**--flavors**|dictionary|Dictionary mapping model flavors to their properties.|flavors|flavors|
+|**--datastore-id**|string|The asset datastoreId.|datastore_id|datastoreId|
+|**--asset-path**|object|DEPRECATED - use Microsoft.MachineLearning.ManagementFrontEnd.Contracts.Assets.Asset.Path instead|asset_path|assetPath|
+|**--path**|string|The path of the file/directory.|path|path|
+|**--generated-by**|choice|Whether the name and version are system generated (anonymous registration) or user generated.|generated_by|generatedBy|
+|**--description**|string|The asset description text.|description|description|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+
+#### Command `az machinelearningservices model-version delete`
+
+##### Example
+```
+az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" --version "999" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices notebook`
+#### Command `az machinelearningservices notebook list-key`
+
+##### Example
+```
+az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices notebook prepare`
+
+##### Example
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices online-deployment`
+#### Command `az machinelearningservices online-deployment list`
+
+##### Example
+```
+az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Top of list.|top|$top|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices online-deployment show`
+
+##### Example
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-deployment create`
+
+##### Example
+```
+az machinelearningservices online-deployment create --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\
+\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalPro\
+p2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"strin\
+g\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\
+\\"tenantId\\":\\"string\\"}}" --kind "string" --location "string" --properties description="string" \
+codeConfiguration={"codeArtifactId":"string","command":"string"} deploymentConfiguration={"appInsightsEnabled":true,"co\
+mputeType":"Managed","maxConcurrentRequestsPerInstance":0,"maxQueueWaitMs":0,"scoringTimeoutMs":0} \
+environmentId="string" modelReference={"assetId":"string","referenceType":"Id"} properties={"additionalProp1":"string",\
+"additionalProp2":"string","additionalProp3":"string"} scaleSettings={"instanceCount":0,"maximum":0,"minimum":0,"scaleT\
+ype":"Automatic"} --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string||location|location|
+|**--deployment-configuration**|object||deployment_configuration|deploymentConfiguration|
+|**--tags**|dictionary|Dictionary of tags.|tags|tags|
+|**--kind**|string||kind|kind|
+|**--scale-settings**|object||scale_settings|scaleSettings|
+|**--description**|string|Description of the endpoint deployment.|description|description|
+|**--properties**|dictionary|Property dictionary. Properties can be added, but not removed or altered.|properties|properties|
+|**--id-asset-reference**|object||id_asset_reference|IdAssetReference|
+|**--data-path-asset-reference**|object||data_path_asset_reference|DataPathAssetReference|
+|**--output-path-asset-reference**|object||output_path_asset_reference|OutputPathAssetReference|
+|**--code-configuration**|object|Code configuration for the endpoint deployment.|code_configuration|codeConfiguration|
+|**--environment-id**|string|Environment specification for the endpoint deployment.|environment_id|environmentId|
+|**--environment-variables**|dictionary|Environment variables configuration for the deployment.|environment_variables|environmentVariables|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ResourceId of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-deployment update`
+
+##### Example
+```
+az machinelearningservices online-deployment update --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\
+\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalPro\
+p2\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"strin\
+g\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\
+\\"tenantId\\":\\"string\\"}}" --kind "string" --deployment-configuration "{\\"appInsightsEnabled\\":true,\\"computeTyp\
+e\\":\\"Managed\\",\\"maxConcurrentRequestsPerInstance\\":0,\\"maxQueueWaitMs\\":0,\\"scoringTimeoutMs\\":0}" \
+--scale-settings instance-count=0 maximum=0 minimum=0 scale-type="Automatic" --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|Dictionary of tags.|tags|tags|
+|**--location**|string||location|location|
+|**--kind**|string||kind|kind|
+|**--scale-settings**|object||scale_settings|scaleSettings|
+|**--deployment-configuration**|object||deployment_configuration|deploymentConfiguration|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ResourceId of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-deployment delete`
+
+##### Example
+```
+az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-deployment get-log`
+
+##### Example
+```
+az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|The name and identifier for the endpoint.|deployment_name|deploymentName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--container-type**|choice|The type of container to retrieve logs from.|container_type|containerType|
+|**--tail**|integer|The maximum number of lines to tail.|tail|tail|
+
+### group `az machinelearningservices online-endpoint`
+#### Command `az machinelearningservices online-endpoint list`
+
+##### Example
+```
+az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--name**|string|Name of the endpoint.|name|name|
+|**--count**|integer|Number of endpoints to be retrieved in a page of results.|count|count|
+|**--compute-type**|choice|EndpointComputeType to be filtered by.|compute_type|computeType|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+|**--tags**|string|A set of tags with which to filter the returned endpoints. It is a comma separated string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .|tags|tags|
+|**--properties**|string|A set of properties with which to filter the returned endpoints. It is a comma separated string of properties key and/or properties key=value. Example: propKey1,propKey2,propKey3=value3 .|properties|properties|
+|**--order-by**|choice|The option to order the response.|order_by|orderBy|
+
+#### Command `az machinelearningservices online-endpoint show`
+
+##### Example
+```
+az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint create`
+
+##### Example
+```
+az machinelearningservices online-endpoint create --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\\
+"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\
+\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\
+\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\\
+"tenantId\\":\\"string\\"}}" --kind "string" --location "string" --properties description="string" authMode="AMLToken" \
+computeConfiguration={"computeType":"Managed"} properties={"additionalProp1":"string","additionalProp2":"string","addit\
+ionalProp3":"string"} trafficRules={"additionalProp1":0,"additionalProp2":0,"additionalProp3":0} --tags \
+additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string||location|location|
+|**--auth-mode**|choice|Inference endpoint authentication mode type|auth_mode|authMode|
+|**--tags**|dictionary|Dictionary of tags.|tags|tags|
+|**--kind**|string||kind|kind|
+|**--description**|string|Description of the inference endpoint.|description|description|
+|**--properties**|dictionary|Property dictionary. Properties can be added, but not removed or altered.|properties|properties|
+|**--traffic-rules**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic_rules|trafficRules|
+|**--aks-compute-configuration**|object||aks_compute_configuration|AksComputeConfiguration|
+|**--managed-compute-configuration**|object||managed_compute_configuration|ManagedComputeConfiguration|
+|**--azure-ml-compute-configuration**|object||azure_ml_compute_configuration|AzureMLComputeConfiguration|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ResourceId of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-endpoint update`
+
+##### Example
+```
+az machinelearningservices online-endpoint update --user-assigned-identities "{\\"additionalProp1\\":{\\"clientId\\":\\\
+"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\\"},\\"additionalProp2\
+\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\"tenantId\\":\\"string\
+\\"},\\"additionalProp3\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\",\\"resourceId\\":\\"string\\",\\\
+"tenantId\\":\\"string\\"}}" --kind "string" --traffic-rules additionalProp1=0 additionalProp2=0 additionalProp3=0 \
+--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|Dictionary of tags.|tags|tags|
+|**--location**|string||location|location|
+|**--kind**|string||kind|kind|
+|**--traffic-rules**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic_rules|trafficRules|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ResourceId of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-endpoint delete`
+
+##### Example
+```
+az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint get-token`
+
+##### Example
+```
+az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint list-key`
+
+##### Example
+```
+az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint regenerate-key`
+
+##### Example
+```
+az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--key-type**|choice|Specification for which type of key to generate. Primary or Secondary.|key_type|keyType|
+|**--key-value**|string|The value the key is set to.|key_value|keyValue|
+
+### group `az machinelearningservices private-endpoint-connection`
+#### Command `az machinelearningservices private-endpoint-connection show`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" --resource-group \
+"rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection delete`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection put`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+### group `az machinelearningservices private-link-resource`
+#### Command `az machinelearningservices private-link-resource list`
+
+##### Example
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices quota`
+#### Command `az machinelearningservices quota list`
+
+##### Example
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+#### Command `az machinelearningservices quota update`
+
+##### Example
+```
+az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/\
+quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningSe\
+rvices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" --value \
+type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/reso\
+urceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluste\
+r_Dedicated_vCPUs" limit=200 unit="Count"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota update is queried.|location|location|
+|**--value**|array|The list for update quota.|value|value|
+
+### group `az machinelearningservices usage`
+#### Command `az machinelearningservices usage list`
+
+##### Example
+```
+az machinelearningservices usage list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### group `az machinelearningservices virtual-machine-size`
+#### Command `az machinelearningservices virtual-machine-size list`
+
+##### Example
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which virtual machine sizes are queried.|location|location|
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+#### Command `az machinelearningservices workspace show`
+
+##### Example
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace create`
+
+##### Example
+```
+az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" --description "test \
+description" --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/\
+providers/microsoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-4444\
+44444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee1122\
+33445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234\
+/providers/Microsoft.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+--key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyV\
+ault/vaults/testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/0000\
+0000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbre\
+source/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --sku name="Basic" tier="Basic" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable.|friendly_name|friendlyName|
+|**--key-vault**|string|ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created|key_vault|keyVault|
+|**--application-insights**|string|ARM id of the application insights associated with this workspace. This cannot be changed once the workspace has been created|application_insights|applicationInsights|
+|**--container-registry**|string|ARM id of the container registry associated with this workspace. This cannot be changed once the workspace has been created|container_registry|containerRegistry|
+|**--storage-account**|string|ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created|storage_account|storageAccount|
+|**--discovery-url**|string|Url for the discovery service to identify regional endpoints for machine learning experimentation services|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--status**|choice|Indicates whether or not the encryption is enabled for the workspace.|status|status|
+|**--key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+
+#### Command `az machinelearningservices workspace update`
+
+##### Example
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" --sku \
+name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName|
+
+#### Command `az machinelearningservices workspace delete`
+
+##### Example
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-key`
+
+##### Example
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace resync-key`
+
+##### Example
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-connection`
+#### Command `az machinelearningservices workspace-connection list`
+
+##### Example
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target \
+"www.facebook.com" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--category**|string|Category of the workspace connection.|category|category|
+
+#### Command `az machinelearningservices workspace-connection show`
+
+##### Example
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+#### Command `az machinelearningservices workspace-connection create`
+
+##### Example
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+--auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+|**--name**|string|Friendly name of the workspace connection|name|name|
+|**--category**|string|Category of the workspace connection.|category|category|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType|
+|**--value**|string|Value details of the workspace connection.|value|value|
+
+#### Command `az machinelearningservices workspace-connection delete`
+
+##### Example
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+### group `az machinelearningservices workspace-feature`
+#### Command `az machinelearningservices workspace-feature list`
+
+##### Example
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg
new file mode 100644
index 00000000000..2fdd96e5d39
--- /dev/null
+++ b/src/machinelearningservices/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py
new file mode 100644
index 00000000000..e4ec7166802
--- /dev/null
+++ b/src/machinelearningservices/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open
+from setuptools import setup, find_packages
+
+# Extension version; keep in sync with the HISTORY.rst entry.
+VERSION = '0.1.0'
+try:
+    from azext_machinelearningservices.manual.version import VERSION  # optional hand-written override
+except ImportError:
+    pass  # no manual version module present; use the default VERSION above
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+    'Development Status :: 4 - Beta',
+    'Intended Audience :: Developers',
+    'Intended Audience :: System Administrators',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
+    'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+
+try:
+    from azext_machinelearningservices.manual.dependency import DEPENDENCIES  # optional hand-written override
+except ImportError:
+    pass  # no manual dependency module present; keep the empty default
+
+with open('README.md', 'r', encoding='utf-8') as f:
+    README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+    HISTORY = f.read()
+
+setup(
+    name='machinelearningservices',
+    version=VERSION,
+    description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+    author='Microsoft Corporation',
+    author_email='azpycli@microsoft.com',
+    url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+    long_description=README + '\n\n' + HISTORY,
+    license='MIT',
+    classifiers=CLASSIFIERS,
+    packages=find_packages(),
+    install_requires=DEPENDENCIES,
+    package_data={'azext_machinelearningservices': ['azext_metadata.json']},
+)