diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst
new file mode 100644
index 00000000000..1c139576ba0
--- /dev/null
+++ b/src/machinelearningservices/HISTORY.rst
@@ -0,0 +1,8 @@
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md
new file mode 100644
index 00000000000..76ca6b28328
--- /dev/null
+++ b/src/machinelearningservices/README.md
@@ -0,0 +1,416 @@
+# Azure CLI machinelearningservices Extension #
+This is the Azure CLI extension for machinelearningservices.
+
+### How to use ###
+Install this extension with the following CLI command:
+```
+az extension add --name machinelearningservices
+```
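+
+To confirm the extension is registered with your CLI, a quick check with the standard extension listing command can help (this uses only built-in `az extension` functionality, not commands from this extension):
+```
+az extension list --query "[?name=='machinelearningservices']" --output table
+```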
+
+### Included Features ###
+#### machinelearningservices workspace ####
+##### Create #####
+```
+az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" \
+ --description "test description" \
+ --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/microsoft.insights/components/testinsights" \
+ --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+ --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+ --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" \
+ --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" \
+ --sku name="Basic" tier="Basic" --resource-group "workspace-1234" --name "testworkspace"
+
+az machinelearningservices workspace wait --created --resource-group "{rg}" --name "{myWorkspace}"
+```
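+
+The create and show commands return the workspace resource as JSON. As a rough sketch using only the global `--query`/`--output` arguments (not extension-specific flags), the workspace resource ID can be captured for reuse in later scripting:
+```
+WORKSPACE_ID=$(az machinelearningservices workspace show --resource-group "workspace-1234" \
+    --name "testworkspace" --query id --output tsv)
+echo $WORKSPACE_ID
+```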
+##### Show #####
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Update #####
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+ --sku name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List-key #####
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Resync-key #####
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+#### machinelearningservices workspace-feature ####
+##### List #####
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+#### machinelearningservices notebook ####
+##### Prepare #####
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+#### machinelearningservices usage ####
+##### List #####
+```
+az machinelearningservices usage list --location "eastus"
+```
+#### machinelearningservices virtual-machine-size ####
+##### List #####
+```
+az machinelearningservices virtual-machine-size list --location "eastus" --recommended false
+```
+#### machinelearningservices quota ####
+##### List #####
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Update #####
+```
+az machinelearningservices quota update --location "eastus" \
+ --value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/StandardDSv2Family" limit=100 unit="Count" \
+ --value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/StandardDSv2Family" limit=200 unit="Count"
+```
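+
+To verify the new limits afterwards, the quota listing can be filtered with a JMESPath query. This is a sketch that assumes the list output is a flat array whose entries expose the id, limit and unit properties described in the command help:
+```
+az machinelearningservices quota list --location "eastus" \
+    --query "[?contains(id, 'StandardDSv2Family')].{id:id, limit:limit, unit:unit}" --output table
+```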
+#### machinelearningservices workspace-connection ####
+##### Create #####
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+ --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" \
+ --target "www.facebook.com" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+#### machinelearningservices machine-learning-compute ####
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" \
+ --ak-s-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" \
+ --aml-compute-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --type "SystemAssigned,UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Update #####
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" \
+ --scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List-key #####
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List-node #####
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Restart #####
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Start #####
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Stop #####
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+ --underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
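+
+The example above removes the underlying compute resource along with its workspace reference. If the intent is only to detach the compute from the workspace while keeping the underlying resource, the service's underlying-resource-action also supports a detach value; the command below is an assumption based on that parameter, not an example taken from the service documentation:
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+    --underlying-resource-action "Detach" --workspace-name "workspaces123"
+```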
+#### machinelearningservices ####
+##### List-sku #####
+```
+az machinelearningservices list-sku
+```
+#### machinelearningservices private-endpoint-connection ####
+##### Put #####
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+ --private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices private-link-resource ####
+##### List #####
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py
new file mode 100644
index 00000000000..b234b2a3aa6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl
+ machinelearningservices_custom = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.custom#{}',
+ client_factory=cf_machinelearningservices_cl)
+ parent = super(AzureMachineLearningWorkspacesCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom)
+
+ def load_command_table(self, args):
+ from azext_machinelearningservices.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_machinelearningservices.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader
diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py
new file mode 100644
index 00000000000..d95d53bf711
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/action.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
new file mode 100644
index 00000000000..cfc30c747c7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
@@ -0,0 +1,4 @@
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
+}
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py
new file mode 100644
index 00000000000..dbe9d5f9742
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/custom.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.custom import * # noqa: F403
+try:
+ from .manual.custom import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
new file mode 100644
index 00000000000..c00137220f9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
@@ -0,0 +1,56 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_machinelearningservices_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_machinelearningservices.vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces
+ return get_mgmt_service_client(cli_ctx,
+ AzureMachineLearningWorkspaces)
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspaces
+
+
+def cf_workspace_feature(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_features
+
+
+def cf_notebook(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).notebooks
+
+
+def cf_usage(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).usages
+
+
+def cf_virtual_machine_size(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).virtual_machine_sizes
+
+
+def cf_quota(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).quotas
+
+
+def cf_workspace_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_connections
+
+
+def cf_machine_learning_compute(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).machine_learning_compute
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connections
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_link_resources
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
new file mode 100644
index 00000000000..d0626c61b97
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
@@ -0,0 +1,894 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list'] = """
+ type: command
+ short-summary: "Lists all the available machine learning workspaces under the specified resource group. And Lists \
+all the available machine learning workspaces under the specified subscription."
+ examples:
+ - name: Get Workspaces by Resource Group
+ text: |-
+ az machinelearningservices workspace list --resource-group "workspace-1234"
+ - name: Get Workspaces by subscription
+ text: |-
+ az machinelearningservices workspace list
+"""
+
+helps['machinelearningservices workspace show'] = """
+ type: command
+ short-summary: "Gets the properties of the specified machine learning workspace."
+ examples:
+ - name: Get Workspace
+ text: |-
+ az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace create'] = """
+ type: command
+ short-summary: "Create a workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --shared-private-link-resources
+ short-summary: "The list of shared private link resources in this workspace."
+ long-summary: |
+ Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \
+status=XX
+
+ name: Unique name of the private link.
+ private-link-resource-id: The resource id that private link links to.
+ group-id: The private link resource group id.
+ request-message: Request message.
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+
+            Multiple resources can be specified by using more than one --shared-private-link-resources argument.
+ - name: --key-vault-properties
+ short-summary: "Customer Key vault properties."
+ long-summary: |
+ Usage: --key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX
+
+ key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present.
+ key-identifier: Required. Key vault uri to access the encryption key.
+ identity-client-id: For future use - The client id of the identity which will be used to access key vault.
+ examples:
+ - name: Create Workspace
+ text: |-
+ az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" \
+--description "test description" --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGr\
+oups/workspace-1234/providers/microsoft.insights/components/testinsights" --container-registry \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistr\
+y/registries/testRegistry" --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/\
+keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/re\
+sourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" \
+--hbi-workspace false --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/pr\
+oviders/Microsoft.KeyVault/vaults/testkv" --shared-private-link-resources name="testdbresource" \
+private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/M\
+icrosoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please \
+approve" status="Approved" --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accoun\
+tcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" --sku name="Basic" tier="Basic" \
+--resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace update'] = """
+ type: command
+ short-summary: "Updates a machine learning workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Update Workspace
+ text: |-
+ az machinelearningservices workspace update --description "new description" --friendly-name "New \
+friendly name" --sku name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace delete'] = """
+ type: command
+ short-summary: "Deletes a machine learning workspace."
+ examples:
+ - name: Delete Workspace
+ text: |-
+ az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-key'] = """
+ type: command
+ short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \
+insights and password for container registry."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace resync-key'] = """
+ type: command
+ short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \
+app insights and password for container registry."
+ examples:
+ - name: Resync Workspace Keys
+ text: |-
+ az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+created.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--created
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+deleted.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--deleted
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+ type: group
+ short-summary: Manage workspace feature with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+ type: command
+ short-summary: "Lists all enabled features for a workspace."
+ examples:
+ - name: List Workspace features
+ text: |-
+ az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices notebook'] = """
+ type: group
+ short-summary: Manage notebook with machinelearningservices
+"""
+
+helps['machinelearningservices notebook prepare'] = """
+ type: command
+ short-summary: "."
+ examples:
+ - name: Prepare Notebook
+ text: |-
+ az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices usage'] = """
+ type: group
+ short-summary: Manage usage with machinelearningservices
+"""
+
+helps['machinelearningservices usage list'] = """
+ type: command
+ short-summary: "Gets the current usage information as well as limits for AML resources for given subscription and \
+location."
+ examples:
+ - name: List Usages
+ text: |-
+ az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+ type: group
+ short-summary: Manage virtual machine size with machinelearningservices
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+ type: command
+ short-summary: "Returns supported VM Sizes in a location."
+ examples:
+ - name: List VM Sizes
+ text: |-
+ az machinelearningservices virtual-machine-size list --location "eastus" --recommended false
+"""
+
+helps['machinelearningservices quota'] = """
+ type: group
+ short-summary: Manage quota with machinelearningservices
+"""
+
+helps['machinelearningservices quota list'] = """
+ type: command
+ short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+ examples:
+ - name: List workspace quotas by VMFamily
+ text: |-
+ az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+ type: command
+ short-summary: "Update quota for each VM family in workspace."
+ parameters:
+ - name: --value
+ short-summary: "The list for update quota."
+ long-summary: |
+ Usage: --value id=XX type=XX limit=XX unit=XX
+
+ id: Specifies the resource ID.
+ type: Specifies the resource type.
+ limit: The maximum permitted quota of the resource.
+ unit: An enum describing the unit of quota measurement.
+
+            Multiple values can be specified by using more than one --value argument.
+ examples:
+ - name: update quotas
+ text: |-
+ az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServi\
+ces/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/provide\
+rs/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/StandardDSv2Family" limit=100 unit="Count" \
+--value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000\
+-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Stan\
+dardDSv2Family" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+ type: group
+ short-summary: Manage workspace connection with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+ type: command
+ short-summary: "List all connections under a AML workspace."
+ examples:
+ - name: ListWorkspaceConnections
+ text: |-
+ az machinelearningservices workspace-connection list --category "ACR" --resource-group \
+"resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+ type: command
+ short-summary: "Get the detail of a workspace connection."
+ examples:
+ - name: GetWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+ type: command
+ short-summary: "Add a new workspace connection."
+ examples:
+ - name: CreateWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection create --connection-name "connection-1" --name \
+"connection-1" --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+ type: command
+ short-summary: "Delete a workspace connection."
+ examples:
+ - name: DeleteWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+--resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices machine-learning-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices
+"""
+
+helps['machinelearningservices machine-learning-compute list'] = """
+ type: command
+ short-summary: "Gets computes in specified workspace."
+ examples:
+ - name: Get Computes
+ text: |-
+ az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute show'] = """
+ type: command
+ short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not \
+returned - use 'keys' nested resource to get them."
+ examples:
+      - name: Get an AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+      - name: Get an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+ - name: Get a ComputeInstance
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aks
+"""
+
+helps['machinelearningservices machine-learning-compute aks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --ak-s-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\
+\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\
+\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"\
+Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aml-compute
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --aml-compute-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\"\
+:{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\
+\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshP\
+ublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group compute-instance
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --compute-instance-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettin\
+gs\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedi\
+cated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\\
+"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-factory
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-lake-analytics
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-000\
+0-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\
+\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute databricks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group databricks
+"""
+
+helps['machinelearningservices machine-learning-compute databricks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a Databricks Compute with a user-assigned identity
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group hd-insight
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --administrator-account
+ short-summary: "Admin credentials for master node of the cluster"
+ long-summary: |
+ Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+ username: Username of admin account
+ password: Password of admin account
+ public-key-data: Public key data
+ private-key-data: Private key data
+ examples:
+      - name: Create an HDInsight Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an HDInsight Compute with a user-assigned identity
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group virtual-machine
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --administrator-account
+ short-summary: "Admin credentials for virtual machine"
+ long-summary: |
+ Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+ username: Username of admin account
+ password: Password of admin account
+ public-key-data: Public key data
+ private-key-data: Private key data
+ examples:
+      - name: Create a VirtualMachine Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a VirtualMachine Compute with a user-assigned identity
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--type "SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000\
+0/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute update'] = """
+ type: command
+ short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+ parameters:
+ - name: --scale-settings
+ short-summary: "Desired scale settings for the amlCompute."
+ long-summary: |
+ Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+ max-node-count: Required. Max number of nodes to use
+ min-node-count: Min number of nodes to use
+            node-idle-time-before-scale-down: Node Idle Time before scaling down amlCompute. This string needs to be \
+an ISO 8601 duration, for example PT5M.
+ examples:
+ - name: Update a AmlCompute Compute
+ text: |-
+ az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute delete'] = """
+ type: command
+ short-summary: "Deletes specified Machine Learning compute."
+ examples:
+ - name: Delete Compute
+ text: |-
+ az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group \
+"testrg123" --underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-key'] = """
+ type: command
+ short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc)."
+ examples:
+ - name: List AKS Compute Keys
+ text: |-
+ az machinelearningservices machine-learning-compute list-key --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-node'] = """
+ type: command
+ short-summary: "Get the details (e.g IP address, port etc) of all the compute nodes in the compute."
+ examples:
+ - name: Get compute nodes information for a compute
+ text: |-
+ az machinelearningservices machine-learning-compute list-node --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute restart'] = """
+ type: command
+ short-summary: "Posts a restart action to a compute instance."
+ examples:
+ - name: Restart ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute restart --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute start'] = """
+ type: command
+ short-summary: "Posts a start action to a compute instance."
+ examples:
+ - name: Start ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute stop'] = """
+ type: command
+ short-summary: "Posts a stop action to a compute instance."
+ examples:
+ - name: Stop ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-compute is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully created.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully updated.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully deleted.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --deleted
+"""
+
+helps['machinelearningservices'] = """
+ type: group
+    short-summary: Manage Azure Machine Learning resources with machinelearningservices
+"""
+
+helps['machinelearningservices list-sku'] = """
+ type: command
+ short-summary: "Lists all skus with associated features."
+ examples:
+ - name: List Skus
+ text: |-
+ az machinelearningservices list-sku
+"""
+
+helps['machinelearningservices private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connection with machinelearningservices
+"""
+
+helps['machinelearningservices private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceGetPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceDeletePrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection put'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+ examples:
+ - name: WorkspacePutPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+private-endpoint-connection is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices private-endpoint-connection is \
+successfully deleted.
+ text: |-
+ az machinelearningservices private-endpoint-connection wait --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace" --deleted
+"""
+
+helps['machinelearningservices private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resource with machinelearningservices
+"""
+
+helps['machinelearningservices private-link-resource list'] = """
+ type: command
+ short-summary: "Gets the private link resources that need to be created for a workspace."
+ examples:
+ - name: WorkspaceListPrivateLinkResources
+ text: |-
+ az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
new file mode 100644
index 00000000000..a437eb0417f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
@@ -0,0 +1,447 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_three_state_flag,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import (
+ get_default_location_from_resource_group,
+ validate_file_or_dict
+)
+from azext_machinelearningservices.action import (
+ AddSku,
+ AddSharedPrivateLinkResources,
+ AddKeyVaultProperties,
+ AddValue,
+ AddAdministratorAccount,
+ AddScaleSettings,
+ AddPrivateLinkServiceConnectionState
+)
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('machinelearningservices workspace list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices workspace show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('description', type=str, help='The description of this workspace.')
+        c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+ c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+ 'changed once the workspace has been created')
+ c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+ 'This cannot be changed once the workspace has been created')
+ c.argument('discovery_url', type=str, help='Url for the discovery service to identify regional endpoints for '
+ 'machine learning experimentation services')
+ c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+ 'workspace and reduce diagnostic data collected by the service')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+ 'whether to allow public access when behind VNet.')
+ c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='+', help='The list of '
+ 'shared private link resources in this workspace.')
+ c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not the '
+ 'encryption is enabled for the workspace.', arg_group='Encryption')
+ c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='Customer Key vault '
+ 'properties.', arg_group='Encryption')
+
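+    # Added note (illustrative, not generated code): --user-assigned-identities takes a JSON
+    # object keyed by the identity ARM resource IDs described in the help above, supplied
+    # inline or via @file. A hypothetical value (the identity name is made up):
+    # {"/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/my-identity": {}}
+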
+ with self.argument_context('machinelearningservices workspace update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+
+ with self.argument_context('machinelearningservices workspace delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace resync-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace-feature list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices notebook prepare') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices usage list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices virtual-machine-size list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+ c.argument('compute_type', type=str, help='Type of compute to filter by.')
+ c.argument('recommended', arg_type=get_three_state_flag(), help='Specifies whether to return recommended vm '
+ 'sizes or all vm sizes')
+
+ with self.argument_context('machinelearningservices quota list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota update') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
+ c.argument('value', action=AddValue, nargs='+', help='The list for update quota.')
+
+ with self.argument_context('machinelearningservices workspace-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection')
+ c.argument('name', type=str, help='Friendly name of the workspace connection')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('auth_type', type=str, help='Authorization type of the workspace connection.')
+ c.argument('value', type=str, help='Value details of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skiptoken', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('ak_s_compute_location', type=str, help='Location for the underlying compute')
+ c.argument('ak_s_description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('ak_s_resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('ak_s_properties', type=validate_file_or_dict, help='AKS properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aml-compute create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('aml_compute_properties', type=validate_file_or_dict, help='AML Compute properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute compute-instance create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('compute_instance_properties', type=validate_file_or_dict, help='Compute Instance properties '
+ 'Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-factory create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-lake-analytics create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('data_lake_store_account_name', type=str, help='DataLake Store Account Name')
+
+ with self.argument_context('machinelearningservices machine-learning-compute databricks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('databricks_access_token', type=str, help='Databricks access token')
+
+ with self.argument_context('machinelearningservices machine-learning-compute hd-insight create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('ssh_port', type=int, help='Port open for ssh connections on the master node of the cluster.')
+ c.argument('address', type=str, help='Public IP address of the master node of the cluster.')
+ c.argument('administrator_account', action=AddAdministratorAccount, nargs='+', help='Admin credentials for '
+ 'master node of the cluster')
+
+ with self.argument_context('machinelearningservices machine-learning-compute virtual-machine create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities '
+ 'associated with resource. The user identity dictionary key references will be ARM resource ids in '
+ 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+ 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('virtual_machine_size', type=str, help='Virtual Machine size')
+ c.argument('ssh_port', type=int, help='Port open for ssh connections.')
+ c.argument('address', type=str, help='Public IP address of the virtual machine.')
+ c.argument('administrator_account', action=AddAdministratorAccount, nargs='+', help='Admin credentials for '
+ 'virtual machine')
+
+ with self.argument_context('machinelearningservices machine-learning-compute update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('scale_settings', action=AddScaleSettings, nargs='+', help='Desired scale settings for the '
+ 'amlCompute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the '
+ 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-node') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute restart') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute start') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute stop') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection put') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-link-resource list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
new file mode 100644
index 00000000000..a185440b78b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
@@ -0,0 +1,225 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
+class AddSku(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sku = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'tier':
+ d['tier'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, tier'.
+ format(k))
+ return d
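+
+# Added commentary (illustrative, not generated code): AddSku turns the space-separated
+# KEY=VALUE tokens given to --sku into a plain dict, so `--sku name=Basic tier=Basic`
+# is stored on the namespace as {'name': 'Basic', 'tier': 'Basic'}, matching the
+# `--sku name=XX tier=XX` usage described in the help text.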
+
+
+class AddSharedPrivateLinkResources(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'private-link-resource-id':
+ d['private_link_resource_id'] = v[0]
+ elif kl == 'group-id':
+ d['group_id'] = v[0]
+ elif kl == 'request-message':
+ d['request_message'] = v[0]
+ elif kl == 'status':
+ d['status'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter shared_private_link_resources. All '
+ 'possible keys are: name, private-link-resource-id, group-id, request-message, status'.
+ format(k))
+ return d
+
+
+class AddKeyVaultProperties(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.key_vault_properties = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'key-vault-arm-id':
+ d['key_vault_arm_id'] = v[0]
+ elif kl == 'key-identifier':
+ d['key_identifier'] = v[0]
+ elif kl == 'identity-client-id':
+ d['identity_client_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys '
+ 'are: key-vault-arm-id, key-identifier, identity-client-id'.format(k))
+ return d
+
+
+class AddValue(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddValue, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'id':
+ d['id'] = v[0]
+ elif kl == 'type':
+ d['type'] = v[0]
+ elif kl == 'limit':
+ d['limit'] = v[0]
+ elif kl == 'unit':
+ d['unit'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter value. All possible keys are: id, type, '
+ 'limit, unit'.format(k))
+ return d
+
+
+class AddAdministratorAccount(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.administrator_account = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'username':
+ d['username'] = v[0]
+ elif kl == 'password':
+ d['password'] = v[0]
+ elif kl == 'public-key-data':
+ d['public_key_data'] = v[0]
+ elif kl == 'private-key-data':
+ d['private_key_data'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter administrator_account. All possible keys '
+ 'are: username, password, public-key-data, private-key-data'.format(k))
+ return d
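+
+# Added commentary (illustrative, not generated code): AddAdministratorAccount uses the
+# same KEY=VALUE parsing, e.g. `--administrator-account username=azureuser password=<secret>`
+# (hypothetical values) yields {'username': 'azureuser', 'password': '<secret>'};
+# public-key-data and private-key-data map to public_key_data / private_key_data when given.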
+
+
+class AddScaleSettings(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.scale_settings = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ d['min_node_count'] = 0
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'max-node-count':
+ d['max_node_count'] = v[0]
+ elif kl == 'min-node-count':
+ d['min_node_count'] = v[0]
+ elif kl == 'node-idle-time-before-scale-down':
+ d['node_idle_time_before_scale_down'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
+ 'max-node-count, min-node-count, node-idle-time-before-scale-down'.format(k))
+ return d
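+
+# Added commentary (illustrative, not generated code): AddScaleSettings pre-sets
+# min_node_count to 0 and only overrides it when min-node-count is supplied, so
+# `--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down=PT5M`
+# parses to {'max_node_count': '4', 'min_node_count': '4',
+# 'node_idle_time_before_scale_down': 'PT5M'} (values are passed through as strings).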
+
+
+class AddPrivateLinkServiceConnectionState(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.private_link_service_connection_state = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'status':
+ d['status'] = v[0]
+ elif kl == 'description':
+ d['description'] = v[0]
+ elif kl == 'actions-required':
+ d['actions_required'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
+ 'All possible keys are: status, description, actions-required'.format(k))
+ return d
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
new file mode 100644
index 00000000000..712b906364f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
@@ -0,0 +1,162 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
+def load_command_table(self, _):
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace
+ machinelearningservices_workspace = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspaces_ope'
+ 'rations#WorkspacesOperations.{}',
+ client_factory=cf_workspace)
+ with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
+ client_factory=cf_workspace) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_show')
+ g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_workspace_update')
+ g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_workspace_list_key')
+ g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key')
+ g.custom_wait_command('wait', 'machinelearningservices_workspace_show')
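+
+    # Added commentary (assumption, refers to files outside this hunk): the string names
+    # passed to custom_command/custom_show_command are resolved against the extension's
+    # custom command module (typically custom.py, re-exporting generated/custom.py in
+    # Azure CLI codegen extensions).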
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_feature
+ machinelearningservices_workspace_feature = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat'
+ 'ures_operations#WorkspaceFeaturesOperations.{}',
+ client_factory=cf_workspace_feature)
+ with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature,
+ client_factory=cf_workspace_feature) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_feature_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_notebook
+ machinelearningservices_notebook = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._notebooks_oper'
+ 'ations#NotebooksOperations.{}',
+ client_factory=cf_notebook)
+ with self.command_group('machinelearningservices notebook', machinelearningservices_notebook,
+ client_factory=cf_notebook) as g:
+ g.custom_command('prepare', 'machinelearningservices_notebook_prepare')
+
+ from azext_machinelearningservices.generated._client_factory import cf_usage
+ machinelearningservices_usage = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usages_operati'
+ 'ons#UsagesOperations.{}',
+ client_factory=cf_usage)
+ with self.command_group('machinelearningservices usage', machinelearningservices_usage,
+ client_factory=cf_usage) as g:
+ g.custom_command('list', 'machinelearningservices_usage_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size
+ machinelearningservices_virtual_machine_size = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin'
+ 'e_sizes_operations#VirtualMachineSizesOperations.{}',
+ client_factory=cf_virtual_machine_size)
+ with self.command_group('machinelearningservices virtual-machine-size',
+ machinelearningservices_virtual_machine_size,
+ client_factory=cf_virtual_machine_size) as g:
+ g.custom_command('list', 'machinelearningservices_virtual_machine_size_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_quota
+ machinelearningservices_quota = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quotas_operati'
+ 'ons#QuotasOperations.{}',
+ client_factory=cf_quota)
+ with self.command_group('machinelearningservices quota', machinelearningservices_quota,
+ client_factory=cf_quota) as g:
+ g.custom_command('list', 'machinelearningservices_quota_list')
+ g.custom_command('update', 'machinelearningservices_quota_update')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_connection
+ machinelearningservices_workspace_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn'
+ 'ections_operations#WorkspaceConnectionsOperations.{}',
+ client_factory=cf_workspace_connection)
+ with self.command_group('machinelearningservices workspace-connection',
+ machinelearningservices_workspace_connection,
+ client_factory=cf_workspace_connection) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_connection_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_connection_show')
+ g.custom_command('create', 'machinelearningservices_workspace_connection_create')
+ g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_machine_learning_compute
+ machinelearningservices_machine_learning_compute = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni'
+ 'ng_compute_operations#MachineLearningComputeOperations.{}',
+ client_factory=cf_machine_learning_compute)
+ with self.command_group('machinelearningservices machine-learning-compute',
+ machinelearningservices_machine_learning_compute,
+ client_factory=cf_machine_learning_compute) as g:
+ g.custom_command('list', 'machinelearningservices_machine_learning_compute_list')
+ g.custom_show_command('show', 'machinelearningservices_machine_learning_compute_show')
+ g.custom_command('aks create', 'machinelearningservices_machine_learning_compute_aks_create',
+ supports_no_wait=True)
+ g.custom_command('aml-compute create', 'machinelearningservices_machine_learning_compute_aml_compute_create',
+ supports_no_wait=True)
+ g.custom_command('compute-instance create', 'machinelearningservices_machine_learning_compute_compute_instance_'
+ 'create', supports_no_wait=True)
+ g.custom_command('data-factory create', 'machinelearningservices_machine_learning_compute_data_factory_create',
+ supports_no_wait=True)
+ g.custom_command('data-lake-analytics create', 'machinelearningservices_machine_learning_compute_data_lake_anal'
+ 'ytics_create', supports_no_wait=True)
+ g.custom_command('databricks create', 'machinelearningservices_machine_learning_compute_databricks_create',
+ supports_no_wait=True)
+ g.custom_command('hd-insight create', 'machinelearningservices_machine_learning_compute_hd_insight_create',
+ supports_no_wait=True)
+ g.custom_command('virtual-machine create', 'machinelearningservices_machine_learning_compute_virtual_machine_cr'
+ 'eate', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_machine_learning_compute_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_machine_learning_compute_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_machine_learning_compute_list_key')
+ g.custom_command('list-node', 'machinelearningservices_machine_learning_compute_list_node')
+ g.custom_command('restart', 'machinelearningservices_machine_learning_compute_restart')
+ g.custom_command('start', 'machinelearningservices_machine_learning_compute_start')
+ g.custom_command('stop', 'machinelearningservices_machine_learning_compute_stop')
+ g.custom_wait_command('wait', 'machinelearningservices_machine_learning_compute_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices
+ machinelearningservices_ = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_operatio'
+ 'ns#AzureMachineLearningWorkspacesOperationsMixin.{}',
+ client_factory=cf_machinelearningservices)
+ with self.command_group('machinelearningservices', machinelearningservices_,
+ client_factory=cf_machinelearningservices, is_experimental=True) as g:
+ g.custom_command('list-sku', 'machinelearningservices_list_sku')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection
+ machinelearningservices_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi'
+ 'nt_connections_operations#PrivateEndpointConnectionsOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('machinelearningservices private-endpoint-connection',
+ machinelearningservices_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show')
+ g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('put', 'machinelearningservices_private_endpoint_connection_put')
+ g.custom_wait_command('wait', 'machinelearningservices_private_endpoint_connection_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_link_resource
+ machinelearningservices_private_link_resource = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r'
+ 'esources_operations#PrivateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('machinelearningservices private-link-resource',
+ machinelearningservices_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_command('list', 'machinelearningservices_private_link_resource_list')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
new file mode 100644
index 00000000000..c9239de04d6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
@@ -0,0 +1,645 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+# pylint: disable=too-many-lines
+
+from azure.cli.core.util import sdk_no_wait
+
+
+def machinelearningservices_workspace_list(client,
+ resource_group_name=None,
+ skiptoken=None):
+ if resource_group_name:
+ return client.list_by_resource_group(resource_group_name=resource_group_name,
+ skiptoken=skiptoken)
+ return client.list_by_subscription(skiptoken=skiptoken)
+
+
+def machinelearningservices_workspace_show(client,
+ resource_group_name,
+ workspace_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
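+# The create command re-assembles the flattened CLI options into the Workspace
+# request body: top-level fields (location, sku, friendly_name, ...), an
+# 'identity' sub-object built from --type/--user-assigned-identities, and an
+# 'encryption' sub-object built from --status/--key-vault-properties. The
+# long-running create goes through sdk_no_wait so that --no-wait returns
+# immediately and `workspace wait` can poll the operation afterwards.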
+def machinelearningservices_workspace_create(client,
+ resource_group_name,
+ workspace_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ description=None,
+ friendly_name=None,
+ key_vault=None,
+ application_insights=None,
+ container_registry=None,
+ storage_account=None,
+ discovery_url=None,
+ hbi_workspace=None,
+ image_build_compute=None,
+ allow_public_access_when_behind_vnet=None,
+ shared_private_link_resources=None,
+ status=None,
+ key_vault_properties=None,
+ no_wait=False):
+ if hbi_workspace is None:
+ hbi_workspace = False
+ if allow_public_access_when_behind_vnet is None:
+ allow_public_access_when_behind_vnet = False
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+    parameters['description'] = description
+    parameters['friendly_name'] = friendly_name
+ parameters['key_vault'] = key_vault
+ parameters['application_insights'] = application_insights
+ parameters['container_registry'] = container_registry
+ parameters['storage_account'] = storage_account
+ parameters['discovery_url'] = discovery_url
+    parameters['hbi_workspace'] = hbi_workspace
+    parameters['image_build_compute'] = image_build_compute
+    parameters['allow_public_access_when_behind_vnet'] = allow_public_access_when_behind_vnet
+ parameters['shared_private_link_resources'] = shared_private_link_resources
+ parameters['encryption'] = {}
+ parameters['encryption']['status'] = status
+ parameters['encryption']['key_vault_properties'] = key_vault_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_update(client,
+ resource_group_name,
+ workspace_name,
+ tags=None,
+ sku=None,
+ description=None,
+ friendly_name=None):
+ parameters = {}
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['description'] = description
+ parameters['friendly_name'] = friendly_name
+ return client.update(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_delete(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_resync_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.resync_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_feature_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_notebook_prepare(client,
+ resource_group_name,
+ workspace_name):
+ return client.begin_prepare(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_usage_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_virtual_machine_size_list(client,
+ location,
+ compute_type=None,
+ recommended=None):
+ return client.list(location=location,
+ compute_type=compute_type,
+ recommended=recommended)
+
+
+def machinelearningservices_quota_list(client,
+ location):
+ return client.list(location=location)
+
+
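+# Note: 'value' is forwarded as-is in the request body; it is expected to be a
+# list of quota entries (the repeated --value arguments shown in the README are
+# presumably collected into that list by the generated parameter actions).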
+def machinelearningservices_quota_update(client,
+ location,
+ value=None):
+ parameters = {}
+ parameters['value'] = value
+ return client.update(location=location,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_connection_list(client,
+ resource_group_name,
+ workspace_name,
+ target=None,
+ category=None):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ target=target,
+ category=category)
+
+
+def machinelearningservices_workspace_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
+
+
+def machinelearningservices_workspace_connection_create(client,
+ resource_group_name,
+ workspace_name,
+ connection_name,
+ name=None,
+ category=None,
+ target=None,
+ auth_type=None,
+ value=None):
+ parameters = {}
+ parameters['name'] = name
+ parameters['category'] = category
+ parameters['target'] = target
+ parameters['auth_type'] = auth_type
+ parameters['value'] = value
+ return client.create(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
+
+
+def machinelearningservices_machine_learning_compute_list(client,
+ resource_group_name,
+ workspace_name,
+ skiptoken=None):
+ return client.list_by_workspace(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ skiptoken=skiptoken)
+
+
+def machinelearningservices_machine_learning_compute_show(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
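+# The compute create commands below all follow the same pattern: build a
+# ComputeResource envelope (location, tags, sku, identity) plus a 'properties'
+# dict whose 'compute_type' discriminator selects the concrete compute kind
+# ('Aks', 'AmlCompute', 'ComputeInstance', 'DataFactory', ...), then start the
+# long-running create via sdk_no_wait(client.begin_create_or_update, ...).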
+def machinelearningservices_machine_learning_compute_aks_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ ak_s_compute_location=None,
+ ak_s_description=None,
+ ak_s_resource_id=None,
+ ak_s_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'Aks'
+ parameters['properties']['compute_location'] = ak_s_compute_location
+ parameters['properties']['description'] = ak_s_description
+ parameters['properties']['resource_id'] = ak_s_resource_id
+ parameters['properties']['properties'] = ak_s_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_aml_compute_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ aml_compute_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'AmlCompute'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = aml_compute_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_compute_instance_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ compute_instance_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'ComputeInstance'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = compute_instance_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_data_factory_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'DataFactory'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_data_lake_analytics_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ data_lake_store_account_name=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'DataLakeAnalytics'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['data_lake_store_account_name'] = data_lake_store_account_name
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_databricks_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ databricks_access_token=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'Databricks'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['databricks_access_token'] = databricks_access_token
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_hd_insight_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ ssh_port=None,
+ address=None,
+ administrator_account=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'HdInsight'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['ssh_port'] = ssh_port
+ parameters['properties']['properties']['address'] = address
+ parameters['properties']['properties']['administrator_account'] = administrator_account
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_virtual_machine_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ virtual_machine_size=None,
+ ssh_port=None,
+ address=None,
+ administrator_account=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'VirtualMachine'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['virtual_machine_size'] = virtual_machine_size
+ parameters['properties']['properties']['ssh_port'] = ssh_port
+ parameters['properties']['properties']['address'] = address
+ parameters['properties']['properties']['administrator_account'] = administrator_account
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_update(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ scale_settings=None,
+ no_wait=False):
+ parameters = {}
+ parameters['scale_settings'] = scale_settings
+ return sdk_no_wait(no_wait,
+ client.begin_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_delete(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ underlying_resource_action,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action)
+
+
+def machinelearningservices_machine_learning_compute_list_key(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_list_node(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_nodes(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_restart(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.restart(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_start(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.start(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_stop(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.stop(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_list_sku(client):
+ return client.list_skus()
+
+
+def machinelearningservices_private_endpoint_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_endpoint_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_endpoint_connection_put(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name,
+ private_link_service_connection_state=None):
+ properties = {}
+ properties['private_link_service_connection_state'] = private_link_service_connection_state
+ return client.put(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ properties=properties)
+
+
+def machinelearningservices_private_link_resource_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_by_workspace(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
new file mode 100644
index 00000000000..70488e93851
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+exceptions = []
+test_map = dict()
+SUCCESSED = "successed"
+FAILED = "failed"
+
+
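+# try_manual lets hand-written test code override a generated step or test
+# class: it looks for a module with the same path relative to this package
+# under the sibling 'manual' package and, if a function (or class) with the
+# same name exists there, calls that instead of the generated one. For example,
+# a hypothetical override of step_workspace_create would live in
+# manual/tests/latest/example_steps.py and keep the same function name.
+# The result of each step is recorded in test_map for the coverage report.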
+def try_manual(func):
+ def import_manual_function(origin_func):
+ from importlib import import_module
+ decorated_path = inspect.getfile(origin_func).lower()
+ module_path = __path__[0].lower()
+ if not decorated_path.startswith(module_path):
+ raise Exception("Decorator can only be used in submodules!")
+ manual_path = os.path.join(
+ decorated_path[module_path.rfind(os.path.sep) + 1:])
+ manual_file_path, manual_file_name = os.path.split(manual_path)
+ module_name, _ = os.path.splitext(manual_file_name)
+ manual_module = "..manual." + \
+ ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+ return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+ def get_func_to_call():
+ func_to_call = func
+ try:
+ func_to_call = import_manual_function(func)
+ logger.info("Found manual override for %s(...)", func.__name__)
+ except (ImportError, AttributeError):
+ pass
+ return func_to_call
+
+ def wrapper(*args, **kwargs):
+ func_to_call = get_func_to_call()
+ logger.info("running %s()...", func.__name__)
+ try:
+ test_map[func.__name__] = dict()
+ test_map[func.__name__]["result"] = SUCCESSED
+ test_map[func.__name__]["error_message"] = ""
+ test_map[func.__name__]["error_stack"] = ""
+ test_map[func.__name__]["error_normalized"] = ""
+ test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+ ret = func_to_call(*args, **kwargs)
+ except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+ JMESPathCheckAssertionError) as e:
+ use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
+ if use_exception_cache is None or use_exception_cache.lower() != "true":
+ raise
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ test_map[func.__name__]["result"] = FAILED
+ test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+ test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+ "\r\n", " ").replace("\n", " ")[:500]
+ logger.info("--------------------------------------")
+ logger.info("step exception: %s", e)
+ logger.error("--------------------------------------")
+ logger.error("step exception in %s: %s", func.__name__, e)
+ logger.info(traceback.format_exc())
+ exceptions.append((func.__name__, sys.exc_info()))
+ else:
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ return ret
+
+ if inspect.isclass(func):
+ return get_func_to_call()
+ return wrapper
+
+
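+# calc_coverage writes <testfile>_coverage.md summarising every recorded
+# 'step_*' entry in test_map (result, error details, start/end timestamps)
+# plus a covered/total line; raise_if re-raises the first cached exception so
+# a test still fails when TEST_EXCEPTION_CACHE=true deferred the errors.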
+def calc_coverage(filename):
+ filename = filename.split(".")[0]
+ coverage_name = filename + "_coverage.md"
+ with open(coverage_name, "w") as f:
+ f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+ total = len(test_map)
+ covered = 0
+ for k, v in test_map.items():
+ if not k.startswith("step_"):
+ total -= 1
+ continue
+ if v["result"] == SUCCESSED:
+ covered += 1
+ f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+ "{end_dt}|\n".format(step_name=k, **v))
+ f.write("Coverage: {}/{}\n".format(covered, total))
+ print("Create coverage\n", file=sys.stderr)
+
+
+def raise_if():
+ if exceptions:
+ if len(exceptions) <= 1:
+ raise exceptions[0][1][1]
+ message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
+ message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
+ raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
new file mode 100644
index 00000000000..90952047249
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
@@ -0,0 +1,496 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
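+# Each step below runs one `az machinelearningservices ...` command taken from
+# the service examples; the '{...}' placeholders are resolved by ScenarioTest
+# from the test's kwargs (resource group preparers, workspace names, etc.) and
+# the optional 'checks' list carries JMESPath assertions on the JSON output.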
+
+# EXAMPLE: /Workspaces/put/Create Workspace
+@try_manual
+def step_workspace_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace create '
+ '--type "SystemAssigned" '
+ '--location "eastus2euap" '
+ '--description "test description" '
+ '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights'
+ '/components/testinsights" '
+ '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR'
+ 'egistry/registries/testRegistry" '
+ '--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/'
+ 'aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
+ 'providers/Microsoft.KeyVault/vaults/testkv" '
+ '--status "Enabled" '
+ '--friendly-name "HelloName" '
+ '--hbi-workspace false '
+ '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes'
+ 'tkv" '
+ '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript'
+ 'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes'
+ 'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" '
+ 'status="Approved" '
+ '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto'
+ 'rageAccounts/{sa}" '
+ '--sku name="Basic" tier="Basic" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=[])
+ test.cmd('az machinelearningservices workspace wait --created '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspace
+@try_manual
+def step_workspace_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace show '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group
+@try_manual
+def step_workspace_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by subscription
+@try_manual
+def step_workspace_list2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '-g ""',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/patch/Update Workspace
+@try_manual
+def step_workspace_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace update '
+ '--description "new description" '
+ '--friendly-name "New friendly name" '
+ '--sku name="Enterprise" tier="Enterprise" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/List Workspace Keys
+@try_manual
+def step_workspace_list_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Resync Workspace Keys
+@try_manual
+def step_workspace_resync_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace resync-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a AML Compute
+@try_manual
+def step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--type "SystemAssigned,UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRes'
+ 'ourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{{}}}}" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{{\\"maxNod'
+ 'eCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}},\\"vmPriority\\":\\"Dedic'
+ 'ated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{{\\"sshPublicAccess'
+ '\\":\\"Disabled\\"}},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a ComputeInstance Compute with minimal inputs
+@try_manual
+def step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a DataFactory Compute
+@try_manual
+def step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create AKS Compute
+@try_manual
+def step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AKS Compute
+@try_manual
+def step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute show '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AML Compute
+@try_manual
+def step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a ComputeInstance
+@try_manual
+def step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get Computes
+@try_manual
+def step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/patch/Update a AmlCompute Compute
+@try_manual
+def step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute update '
+ '--compute-name "compute123" '
+ '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Get compute nodes information for a compute
+@try_manual
+def step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list-node '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/List AKS Compute Keys
+@try_manual
+def step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list-key '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Restart ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute restart '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Start ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute start '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Stop ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute stop '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/delete/Delete Compute
+@try_manual
+def step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute delete -y '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--underlying-resource-action "Delete" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /machinelearningservices/get/List Skus
+@try_manual
+def step_list_sku(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices list-sku',
+ checks=checks)
+
+
+# EXAMPLE: /Notebooks/post/Prepare Notebook
+@try_manual
+def step_notebook_prepare(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices notebook prepare '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection put '
+ '--name "{myPrivateEndpointConnection}" '
+ '--private-link-service-connection-state description="Auto-Approved" status="Approved" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection show '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection delete -y '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources
+@try_manual
+def step_private_link_resource_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-link-resource list '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily
+@try_manual
+def step_quota_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/post/update quotas
+@try_manual
+def step_quota_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota update '
+ '--location "eastus" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" '
+ 'id="/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/wo'
+ 'rkspaces/{myWorkspace3}/quotas/{myQuota}" limit=100 unit="Count" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" '
+ 'id="/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/wo'
+ 'rkspaces/{myWorkspace4}/quotas/{myQuota}" limit=200 unit="Count"',
+ checks=checks)
+
+
+# EXAMPLE: /Usages/get/List Usages
+@try_manual
+def step_usage_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices usage list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes
+@try_manual
+def step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices virtual-machine-size list '
+ '--location "eastus" '
+ '--recommended false',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection
+@try_manual
+def step_workspace_connection_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection create '
+ '--connection-name "{myMachinelearningservice}" '
+ '--name "{myMachinelearningservice}" '
+ '--auth-type "PAT" '
+ '--category "ACR" '
+ '--target "www.facebook.com" '
+ '--value "secrets" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection
+@try_manual
+def step_workspace_connection_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection show '
+ '--connection-name "{myMachinelearningservice}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections
+@try_manual
+def step_workspace_connection_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection list '
+ '--category "ACR" '
+ '--resource-group "{rg_6}" '
+ '--target "www.facebook.com" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection
+@try_manual
+def step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection delete -y '
+ '--connection-name "{myMachinelearningservice}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceFeatures/get/List Workspace features
+@try_manual
+def step_workspace_feature_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-feature list '
+ '--resource-group "{rg_4}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/delete/Delete Workspace
+@try_manual
+def step_workspace_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace delete -y '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
new file mode 100644
index 00000000000..1d8a3563c01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
@@ -0,0 +1,264 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from azure.cli.testsdk import StorageAccountPreparer
+from .example_steps import step_workspace_create
+from .example_steps import step_workspace_show
+from .example_steps import step_workspace_list
+from .example_steps import step_workspace_list2
+from .example_steps import step_workspace_update
+from .example_steps import step_workspace_list_key
+from .example_steps import step_workspace_resync_key
+from .example_steps import step_machine_learning_compute_aks_create
+from .example_steps import step_machine_learning_compute_aks_create2
+from .example_steps import step_machine_learning_compute_aks_create3
+from .example_steps import step_machine_learning_compute_aks_create4
+from .example_steps import step_machine_learning_compute_aks_create5
+from .example_steps import step_machine_learning_compute_show
+from .example_steps import step_machine_learning_compute_show2
+from .example_steps import step_machine_learning_compute_show3
+from .example_steps import step_machine_learning_compute_list
+from .example_steps import step_machine_learning_compute_update
+from .example_steps import step_machine_learning_compute_list_node
+from .example_steps import step_machine_learning_compute_list_key
+from .example_steps import step_machine_learning_compute_restart
+from .example_steps import step_machine_learning_compute_start
+from .example_steps import step_machine_learning_compute_stop
+from .example_steps import step_machine_learning_compute_delete
+from .example_steps import step_list_sku
+from .example_steps import step_notebook_prepare
+from .example_steps import step_private_endpoint_connection_put
+from .example_steps import step_private_endpoint_connection_show
+from .example_steps import step_private_endpoint_connection_delete
+from .example_steps import step_private_link_resource_list
+from .example_steps import step_quota_list
+from .example_steps import step_quota_update
+from .example_steps import step_usage_list
+from .example_steps import step_virtual_machine_size_list
+from .example_steps import step_workspace_connection_create
+from .example_steps import step_workspace_connection_show
+from .example_steps import step_workspace_connection_list
+from .example_steps import step_workspace_connection_delete
+from .example_steps import step_workspace_feature_list
+from .example_steps import step_workspace_delete
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
+
+
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+ pass
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+ pass
+
+
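+# call_scenario runs the generated steps in order against the prepared resource
+# groups; the test.check(...) entries are JMESPath assertions evaluated on the
+# command output, with '{...}' placeholders resolved from the test kwargs. The
+# try_manual / raise_if / calc_coverage helpers imported above come from
+# tests/__init__.py and handle manual overrides, deferred failures and the
+# coverage report.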
+# Testcase: Scenario
+@try_manual
+def call_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+ setup_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7)
+ step_workspace_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("sku.name", "Basic", case_sensitive=False),
+ test.check("sku.tier", "Basic", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_workspace_list2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check('length(@)', 2),
+ ])
+ step_workspace_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "new description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("friendlyName", "New friendly name", case_sensitive=False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("sku.name", "Enterprise", case_sensitive=False),
+ test.check("sku.tier", "Enterprise", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_list_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_resync_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_list_sku(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_notebook_prepare(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[
+ test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+ ])
+ step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_private_link_resource_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_quota_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_quota_update(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_usage_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_connection_create(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_connection_show(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_connection_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_feature_list(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ step_workspace_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7, checks=[])
+ cleanup_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7)
+
+
+# Test class for Scenario
+@try_manual
+class MachinelearningservicesScenarioTest(ScenarioTest):
+
+ def __init__(self, *args, **kwargs):
+ super(MachinelearningservicesScenarioTest, self).__init__(*args, **kwargs)
+ self.kwargs.update({
+ 'subscription_id': self.get_subscription_id()
+ })
+
+ self.kwargs.update({
+ 'myMachinelearningservice': 'connection-1',
+ 'myWorkspace6': 'default',
+ 'myPrivateLinkResource2': 'default',
+ 'myWorkspace3': 'demo_workspace1',
+ 'myWorkspace4': 'demo_workspace2',
+ 'myWorkspace': 'testworkspace',
+ 'myWorkspace2': 'workspaces123',
+ 'myWorkspace5': 'workspace-1',
+ 'myQuota': 'StandardDSv2Family',
+ 'myPrivateEndpointConnection': '{privateEndpointConnectionName}',
+ 'myPrivateLinkResource': 'Sql',
+ })
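+
+ # Note: ScenarioTest substitutes these self.kwargs entries into the
+ # '{placeholder}' tokens used by the step commands and checks above, so a
+ # check written as test.check("name", "{myWorkspace}") is compared against
+ # 'testworkspace'. Descriptive comment only; the substitution itself is
+ # performed by azure.cli.testsdk.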
+
+
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg',
+ parameter_name='rg')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2',
+ parameter_name='rg_2')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_5', parameter_name='rg_5')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3',
+ parameter_name='rg_3')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_4',
+ parameter_name='rg_4')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_6',
+ parameter_name='rg_6')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_7',
+ parameter_name='rg_7')
+ @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa',
+ resource_group_parameter_name='rg_2')
+ def test_machinelearningservices_Scenario(self, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+ call_scenario(self, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7)
+ calc_coverage(__file__)
+ raise_if()
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
new file mode 100644
index 00000000000..dad2c6eeb01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+try:
+ from ._patch import patch_sdk # type: ignore
+ patch_sdk()
+except ImportError:
+ pass
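+
+# The try/except above is the standard AutoRest customization hook: when a
+# sibling _patch.py module defines patch_sdk(), it is imported and invoked here
+# so hand-written adjustments can be applied on top of the generated client;
+# without such a module the ImportError is simply ignored.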
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..873682820bf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
@@ -0,0 +1,120 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import NotebooksOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import MachineLearningComputeOperations
+from .operations import AzureMachineLearningWorkspacesOperationsMixin
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from . import models
+
+
+class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.operations.WorkspacesOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.operations.WorkspaceFeaturesOperations
+ :ivar notebooks: NotebooksOperations operations
+ :vartype notebooks: azure_machine_learning_workspaces.operations.NotebooksOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.operations.QuotasOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.operations.WorkspaceConnectionsOperations
+ :ivar machine_learning_compute: MachineLearningComputeOperations operations
+ :vartype machine_learning_compute: azure_machine_learning_workspaces.operations.MachineLearningComputeOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.operations.PrivateLinkResourcesOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ base_url=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.notebooks = NotebooksOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_compute = MachineLearningComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> AzureMachineLearningWorkspaces
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
new file mode 100644
index 00000000000..0d9620bc63b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2020-08-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
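+
+ # Illustrative note (an assumption drawn from the kwargs.get(...) pattern above):
+ # any of these pipeline policies can be replaced by passing a keyword argument of
+ # the same name when the client is constructed, e.g.
+ #
+ #     from azure.core.pipeline.policies import RetryPolicy
+ #     client = AzureMachineLearningWorkspaces(credential, subscription_id,
+ #                                             retry_policy=RetryPolicy(retry_total=3))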
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
new file mode 100644
index 00000000000..872474577c4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..20e6e369512
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,114 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import NotebooksOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import MachineLearningComputeOperations
+from .operations import AzureMachineLearningWorkspacesOperationsMixin
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .. import models
+
+
+class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.aio.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.aio.operations.WorkspacesOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.aio.operations.WorkspaceFeaturesOperations
+ :ivar notebooks: NotebooksOperations operations
+ :vartype notebooks: azure_machine_learning_workspaces.aio.operations.NotebooksOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.aio.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.aio.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.aio.operations.QuotasOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.aio.operations.WorkspaceConnectionsOperations
+ :ivar machine_learning_compute: MachineLearningComputeOperations operations
+ :vartype machine_learning_compute: azure_machine_learning_workspaces.aio.operations.MachineLearningComputeOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.aio.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.aio.operations.PrivateLinkResourcesOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ base_url: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.notebooks = NotebooksOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_compute = MachineLearningComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
new file mode 100644
index 00000000000..673297675a4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ **kwargs: Any
+ ) -> None:
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2020-08-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs: Any
+ ) -> None:
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
new file mode 100644
index 00000000000..516999b100d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._notebooks_operations import NotebooksOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'WorkspaceFeaturesOperations',
+ 'NotebooksOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'WorkspaceConnectionsOperations',
+ 'MachineLearningComputeOperations',
+ 'AzureMachineLearningWorkspacesOperationsMixin',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py
new file mode 100644
index 00000000000..8cf57106b6f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py
@@ -0,0 +1,89 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class AzureMachineLearningWorkspacesOperationsMixin:
+
+ def list_skus(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.SkuListResult"]:
+ """Lists all skus with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_skus.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..8cfe9577f3c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,914 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations:
+ """MachineLearningComputeOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedComputeResourcesList"]:
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
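+
+ # Illustrative call sketch; the resource names are placeholders, not generated code:
+ #
+ #     async for compute in client.machine_learning_compute.list_by_workspace(
+ #             "workspace-1234", "testworkspace"):
+ #         print(compute.name)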
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeResource":
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
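+
+ # Because get() never returns secrets, callers are expected to follow up with the
+ # generated list_keys operation (assumed to exist later in this class, mirroring the
+ # 'machine-learning-compute list-key' CLI command) to obtain credentials, e.g.:
+ #
+ #     keys = await client.machine_learning_compute.list_keys(
+ #         "workspace-1234", "testworkspace", "compute123")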
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
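For orientation, a minimal usage sketch of this long-running create-or-update follows. It is not part of the generated module: the client class name (`AzureMachineLearningWorkspaces`), the `machine_learning_compute` operation-group attribute, and the `AmlCompute`/`ScaleSettings` model shapes are assumptions about the 2020-08-01 surface; the poller handling itself is the standard `azure-core` async pattern.

```python
# Minimal sketch (assumed names: AzureMachineLearningWorkspaces client,
# machine_learning_compute operation group, AmlCompute/ScaleSettings models).
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def create_cluster():
    credential = DefaultAzureCredential()
    async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
        parameters = models.ComputeResource(
            location="eastus2",
            properties=models.AmlCompute(
                properties=models.AmlComputeProperties(
                    vm_size="STANDARD_DS3_V2",
                    scale_settings=models.ScaleSettings(min_node_count=0, max_node_count=4),
                )
            ),
        )
        # begin_create_or_update returns an AsyncLROPoller; result() waits for provisioning.
        poller = await client.machine_learning_compute.begin_create_or_update(
            resource_group_name="workspace-1234",
            workspace_name="testworkspace",
            compute_name="cpu-cluster",
            parameters=parameters,
        )
        compute = await poller.result()
        print(compute.id, compute.name)
    await credential.close()


asyncio.run(create_cluster())
```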
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
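A corresponding sketch for the PATCH path, assuming the `client` constructed in the create example above; the flattened `scale_settings` keyword on `ClusterUpdateParameters` is likewise an assumption.

```python
# Sketch: resize an existing AmlCompute cluster via begin_update.
# `client` is the AzureMachineLearningWorkspaces async client from the create sketch;
# the flattened scale_settings keyword on ClusterUpdateParameters is an assumption.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models


async def resize_cluster(client):
    update = models.ClusterUpdateParameters(
        scale_settings=models.ScaleSettings(min_node_count=0, max_node_count=8)
    )
    poller = await client.machine_learning_compute.begin_update(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        compute_name="cpu-cluster",
        parameters=update,
    )
    return await poller.result()  # ComputeResource with the new scale settings
```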
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+ underlying compute from workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
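Delete follows the same poller pattern; the only extra input is `underlying_resource_action`, which decides whether the backing Azure resource is removed or merely detached from the workspace. A sketch with the same assumed `client`:

```python
# Sketch: detach a compute target from the workspace; pass "Delete" instead of "Detach"
# to also remove the underlying Azure resource.
async def detach_compute(client):
    poller = await client.machine_learning_compute.begin_delete(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        compute_name="cpu-cluster",
        underlying_resource_action="Detach",
    )
    await poller.result()  # resolves to None once the delete/detach completes
```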
+
+ def list_nodes(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.AmlComputeNodesInformation"]:
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
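`list_nodes` returns an `AsyncItemPaged`, so callers consume it with `async for`. In the sketch below the node attribute names are assumptions about `AmlComputeNodeInformation`; `client` is the assumed async client from earlier.

```python
# Sketch: enumerate node details of an AmlCompute cluster; attribute names are assumptions.
async def print_nodes(client):
    nodes = client.machine_learning_compute.list_nodes(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        compute_name="cpu-cluster",
    )
    async for node in nodes:
        print(node.node_id, node.public_ip_address, node.port, node.node_state)
```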
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeSecrets":
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
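`list_keys` is a plain awaitable that returns the polymorphic `ComputeSecrets`; the concrete subtype depends on the compute kind, so the sketch below only inspects the discriminator. Same assumed `client` as above.

```python
# Sketch: fetch compute secrets; ComputeSecrets is polymorphic, so the concrete model
# (e.g. AKS credentials) depends on the compute type.
async def show_secret_kind(client):
    secrets = await client.machine_learning_compute.list_keys(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        compute_name="aks-compute",
    )
    print(secrets.compute_type)
```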
+
+ async def start(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.start.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ async def stop(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.stop.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ async def restart(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
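`start`, `stop`, and `restart` are simple POST actions without a poller; each call returns once the service accepts the action. A sketch of the compute-instance lifecycle calls, with the same assumed `client`:

```python
# Sketch: lifecycle actions on a compute instance; each is a plain POST with no poller.
async def recycle_compute_instance(client):
    target = dict(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        compute_name="my-compute-instance",
    )
    await client.machine_learning_compute.stop(**target)
    await client.machine_learning_compute.start(**target)
    await client.machine_learning_compute.restart(**target)
```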
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..23fffbd7e42
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
@@ -0,0 +1,160 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations:
+ """NotebooksOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def _prepare_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> Optional["models.NotebookResourceInfo"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def begin_prepare(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller["models.NotebookResourceInfo"]:
+ """prepare.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._prepare_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
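A usage sketch for the notebook-prepare LRO; the `notebooks` attribute name on the client and the `fqdn` field on `NotebookResourceInfo` are assumptions.

```python
# Sketch: provision the workspace notebook resource and read back its endpoint.
# `client` is the assumed AzureMachineLearningWorkspaces async client from earlier.
async def prepare_notebook(client):
    poller = await client.notebooks.begin_prepare(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
    )
    info = await poller.result()
    print(info.fqdn)  # fqdn is an assumed field on NotebookResourceInfo
```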
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
new file mode 100644
index 00000000000..9bc5433b754
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations:
+ """Operations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.OperationListResult"]:
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
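The operations list is a straightforward async page iterator; a short sketch with the same assumed `client`:

```python
# Sketch: list every available Microsoft.MachineLearningServices REST operation.
async def print_operations(client):
    async for operation in client.operations.list():
        print(operation.name)
```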
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..3423a76fc69
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,296 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations:
+ """PrivateEndpointConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def put(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ properties: "models.PrivateEndpointConnection",
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.put.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
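Approving a pending connection is a get-modify-put round trip; in the sketch below the `private_endpoint_connections` attribute and the `PrivateLinkServiceConnectionState` shape are assumptions about this API version.

```python
# Sketch: approve a pending private endpoint connection with a get-modify-put round trip.
# The PrivateLinkServiceConnectionState shape is an assumption about this API version.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models


async def approve_connection(client, connection_name="pe-connection-1"):
    connection = await client.private_endpoint_connections.get(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        private_endpoint_connection_name=connection_name,
    )
    connection.private_link_service_connection_state = models.PrivateLinkServiceConnectionState(
        status="Approved",
        description="Approved by the workspace admin",
    )
    return await client.private_endpoint_connections.put(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        private_endpoint_connection_name=connection_name,
        properties=connection,
    )
```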
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..e30fb5e0d1e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations:
+ """PrivateLinkResourcesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.PrivateLinkResourceListResult":
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
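A minimal sketch of calling `list_by_workspace`, assuming the same hypothetical client class and a `private_link_resources` attribute; unlike the paged operations below, this is a single GET that returns the list result directly:

```python
# Hypothetical usage sketch; client and attribute names are assumed.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # A single GET: the result is a PrivateLinkResourceListResult, not a pager.
            result = await client.private_link_resources.list_by_workspace(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
            )
            for resource in result.value or []:
                print(resource.name)


asyncio.run(main())
```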
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
new file mode 100644
index 00000000000..aef9ebc99e3
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
@@ -0,0 +1,175 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations:
+ """QuotasOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def update(
+ self,
+ location: str,
+ parameters: "models.QuotaUpdateParameters",
+ **kwargs
+ ) -> "models.UpdateWorkspaceQuotasResult":
+ """Update quota for each VM family in workspace.
+
+        :param location: The location for which the quota update is queried.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListWorkspaceQuotas"]:
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore
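The quotas `list` operation returns an `AsyncItemPaged`; a minimal sketch of consuming it, again assuming the hypothetical client class and a `quotas` attribute:

```python
# Hypothetical usage sketch; client and attribute names are assumed.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged; consume it with `async for`.
            async for quota in client.quotas.list(location="eastus"):
                print(quota)


asyncio.run(main())
```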
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
new file mode 100644
index 00000000000..89f5d0a6cf9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations:
+ """UsagesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListUsagesResult"]:
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
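The usages pager works the same way; a short sketch under the same client-name assumptions, collecting every page into a list:

```python
# Hypothetical usage sketch; client and attribute names are assumed.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # Collect all pages of usage records into a single list.
            usages = [usage async for usage in client.usages.list(location="eastus")]
            print(len(usages), "usage records")


asyncio.run(main())
```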
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..70fe284ac62
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations:
+ """VirtualMachineSizesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list(
+ self,
+ location: str,
+ compute_type: Optional[str] = None,
+ recommended: Optional[bool] = None,
+ **kwargs
+ ) -> "models.VirtualMachineSizeListResult":
+ """Returns supported VM Sizes in a location.
+
+        :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :param compute_type: Type of compute to filter by.
+ :type compute_type: str
+        :param recommended: Specifies whether to return recommended VM sizes or all VM sizes.
+ :type recommended: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if compute_type is not None:
+ query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str')
+ if recommended is not None:
+ query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
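A minimal sketch of the VM-sizes call, showing how the optional `compute_type` and `recommended` parameters become query-string filters; the client/attribute names, the filter value, and the result field names are assumptions:

```python
# Hypothetical usage sketch; client, attribute, and model field names are
# assumed, and the compute_type filter value is only an example.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # compute_type and recommended map to the compute-type and
            # recommended query parameters; both are optional.
            result = await client.virtual_machine_sizes.list(
                location="eastus",
                compute_type="AmlCompute",
                recommended=True,
            )
            for size in result.value or []:
                print(size.name)


asyncio.run(main())
```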
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..5259a75af47
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations:
+ """WorkspaceConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ target: Optional[str] = None,
+ category: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]:
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ async def create(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ parameters: "models.WorkspaceConnectionDto",
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Add a new workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> None:
+ """Delete a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
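A sketch of the workspace-connections operation group: listing with the optional `target`/`category` filters and deleting a connection. As above, the client class and attribute names are assumptions, and the filter value is only an example:

```python
# Hypothetical usage sketch; client and attribute names are assumed, and the
# category filter value is only an example.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # The optional target/category keywords become query-string filters.
            async for connection in client.workspace_connections.list(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
                category="ACR",
            ):
                print(connection.name)

            # delete() returns None on HTTP 200/204; failures raise HttpResponseError.
            await client.workspace_connections.delete(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
                connection_name="my-connection",
            )


asyncio.run(main())
```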
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..7ad08485073
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations:
+ """WorkspaceFeaturesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListAmlUserFeatureResult"]:
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
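A short sketch of listing enabled workspace features, under the same client-name assumptions; the pager follows `next_link` transparently until the collection is exhausted:

```python
# Hypothetical usage sketch; client and attribute names are assumed.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # Pages are fetched lazily as the async iterator advances.
            async for feature in client.workspace_features.list(
                resource_group_name="myResourceGroup",
                workspace_name="testworkspace",
            ):
                print(feature)


asyncio.run(main())
```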
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..fc12e6d6231
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
@@ -0,0 +1,674 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations:
+ """WorkspacesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.Workspace":
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> Optional["models.Workspace"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> AsyncLROPoller["models.Workspace"]:
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
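+
+ # Usage sketch (not part of the generated code): awaiting the delete long-running
+ # operation. `client` is an assumed, already-authenticated async
+ # AzureMachineLearningWorkspaces client that exposes this operations group as
+ # `client.workspaces`; the names below are illustrative only.
+ #
+ #     poller = await client.workspaces.begin_delete(
+ #         resource_group_name="workspace-1234",
+ #         workspace_name="testworkspace",
+ #     )
+ #     await poller.result()  # resolves to None once the LRO completes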
+
+ async def update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.WorkspaceUpdateParameters",
+ **kwargs
+ ) -> "models.Workspace":
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
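+
+ # Usage sketch (not part of the generated code): a PATCH update using a
+ # WorkspaceUpdateParameters model and the same assumed `client.workspaces` handle
+ # as above, with the vendored `models` namespace assumed to be imported; the field
+ # values mirror the README example.
+ #
+ #     params = models.WorkspaceUpdateParameters(
+ #         description="new description", friendly_name="New friendly name"
+ #     )
+ #     workspace = await client.workspaces.update(
+ #         "workspace-1234", "testworkspace", params
+ #     )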
+
+ def list_by_resource_group(
+ self,
+ resource_group_name: str,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator-like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
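+
+ # Usage sketch (not part of the generated code): iterating the AsyncItemPaged
+ # result with the same assumed `client.workspaces` handle.
+ #
+ #     async for ws in client.workspaces.list_by_resource_group("workspace-1234"):
+ #         print(ws.name)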
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListWorkspaceKeysResult":
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+ Application Insights, and the container registry password.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
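+
+ # Usage sketch (not part of the generated code), same assumed handle:
+ #
+ #     keys = await client.workspaces.list_keys(
+ #         resource_group_name="testrg123", workspace_name="workspaces123"
+ #     )
+ #     # keys is a ListWorkspaceKeysResult carrying the storage, Application
+ #     # Insights and container registry credentials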
+
+ async def resync_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.resync_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
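+
+ # Usage sketch (not part of the generated code), same assumed handle; resync_keys
+ # returns None on success:
+ #
+ #     await client.workspaces.resync_keys(
+ #         resource_group_name="testrg123", workspace_name="workspaces123"
+ #     )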
+
+ def list_by_subscription(
+ self,
+ skiptoken: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator-like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
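+
+ # Usage sketch (not part of the generated code): subscription-wide listing with the
+ # same assumed handle; the optional $skiptoken continuation token is surfaced as the
+ # `skiptoken` keyword.
+ #
+ #     async for ws in client.workspaces.list_by_subscription():
+ #         print(ws.id)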
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
new file mode 100644
index 00000000000..b5c03186f1b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
@@ -0,0 +1,355 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import Aks
+ from ._models_py3 import AksComputeSecrets
+ from ._models_py3 import AksNetworkingConfiguration
+ from ._models_py3 import AksProperties
+ from ._models_py3 import AmlCompute
+ from ._models_py3 import AmlComputeNodeInformation
+ from ._models_py3 import AmlComputeNodesInformation
+ from ._models_py3 import AmlComputeProperties
+ from ._models_py3 import AmlUserFeature
+ from ._models_py3 import AssignedUser
+ from ._models_py3 import ClusterUpdateParameters
+ from ._models_py3 import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties
+ from ._models_py3 import Compute
+ from ._models_py3 import ComputeInstance
+ from ._models_py3 import ComputeInstanceApplication
+ from ._models_py3 import ComputeInstanceConnectivityEndpoints
+ from ._models_py3 import ComputeInstanceCreatedBy
+ from ._models_py3 import ComputeInstanceLastOperation
+ from ._models_py3 import ComputeInstanceProperties
+ from ._models_py3 import ComputeInstanceSshSettings
+ from ._models_py3 import ComputeNodesInformation
+ from ._models_py3 import ComputeResource
+ from ._models_py3 import ComputeSecrets
+ from ._models_py3 import DataFactory
+ from ._models_py3 import DataLakeAnalytics
+ from ._models_py3 import DataLakeAnalyticsProperties
+ from ._models_py3 import Databricks
+ from ._models_py3 import DatabricksComputeSecrets
+ from ._models_py3 import DatabricksProperties
+ from ._models_py3 import EncryptionProperty
+ from ._models_py3 import ErrorDetail
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import EstimatedVmPrice
+ from ._models_py3 import EstimatedVmPrices
+ from ._models_py3 import HdInsight
+ from ._models_py3 import HdInsightProperties
+ from ._models_py3 import Identity
+ from ._models_py3 import KeyVaultProperties
+ from ._models_py3 import ListAmlUserFeatureResult
+ from ._models_py3 import ListUsagesResult
+ from ._models_py3 import ListWorkspaceKeysResult
+ from ._models_py3 import ListWorkspaceQuotas
+ from ._models_py3 import MachineLearningServiceError
+ from ._models_py3 import NodeStateCounts
+ from ._models_py3 import NotebookListCredentialsResult
+ from ._models_py3 import NotebookPreparationError
+ from ._models_py3 import NotebookResourceInfo
+ from ._models_py3 import Operation
+ from ._models_py3 import OperationDisplay
+ from ._models_py3 import OperationListResult
+ from ._models_py3 import PaginatedComputeResourcesList
+ from ._models_py3 import PaginatedWorkspaceConnectionsList
+ from ._models_py3 import Password
+ from ._models_py3 import PersonalComputeInstanceSettings
+ from ._models_py3 import PrivateEndpoint
+ from ._models_py3 import PrivateEndpointConnection
+ from ._models_py3 import PrivateLinkResource
+ from ._models_py3 import PrivateLinkResourceListResult
+ from ._models_py3 import PrivateLinkServiceConnectionState
+ from ._models_py3 import QuotaBaseProperties
+ from ._models_py3 import QuotaUpdateParameters
+ from ._models_py3 import RegistryListCredentialsResult
+ from ._models_py3 import Resource
+ from ._models_py3 import ResourceId
+ from ._models_py3 import ResourceName
+ from ._models_py3 import ResourceQuota
+ from ._models_py3 import ResourceSkuLocationInfo
+ from ._models_py3 import ResourceSkuZoneDetails
+ from ._models_py3 import Restriction
+ from ._models_py3 import ScaleSettings
+ from ._models_py3 import ServicePrincipalCredentials
+ from ._models_py3 import SharedPrivateLinkResource
+ from ._models_py3 import Sku
+ from ._models_py3 import SkuCapability
+ from ._models_py3 import SkuListResult
+ from ._models_py3 import SkuSettings
+ from ._models_py3 import SslConfiguration
+ from ._models_py3 import SystemService
+ from ._models_py3 import UpdateWorkspaceQuotas
+ from ._models_py3 import UpdateWorkspaceQuotasResult
+ from ._models_py3 import Usage
+ from ._models_py3 import UsageName
+ from ._models_py3 import UserAccountCredentials
+ from ._models_py3 import VirtualMachine
+ from ._models_py3 import VirtualMachineProperties
+ from ._models_py3 import VirtualMachineSecrets
+ from ._models_py3 import VirtualMachineSize
+ from ._models_py3 import VirtualMachineSizeListResult
+ from ._models_py3 import VirtualMachineSshCredentials
+ from ._models_py3 import Workspace
+ from ._models_py3 import WorkspaceConnection
+ from ._models_py3 import WorkspaceConnectionDto
+ from ._models_py3 import WorkspaceListResult
+ from ._models_py3 import WorkspaceSku
+ from ._models_py3 import WorkspaceUpdateParameters
+except (SyntaxError, ImportError):
+ from ._models import Aks # type: ignore
+ from ._models import AksComputeSecrets # type: ignore
+ from ._models import AksNetworkingConfiguration # type: ignore
+ from ._models import AksProperties # type: ignore
+ from ._models import AmlCompute # type: ignore
+ from ._models import AmlComputeNodeInformation # type: ignore
+ from ._models import AmlComputeNodesInformation # type: ignore
+ from ._models import AmlComputeProperties # type: ignore
+ from ._models import AmlUserFeature # type: ignore
+ from ._models import AssignedUser # type: ignore
+ from ._models import ClusterUpdateParameters # type: ignore
+ from ._models import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties # type: ignore
+ from ._models import Compute # type: ignore
+ from ._models import ComputeInstance # type: ignore
+ from ._models import ComputeInstanceApplication # type: ignore
+ from ._models import ComputeInstanceConnectivityEndpoints # type: ignore
+ from ._models import ComputeInstanceCreatedBy # type: ignore
+ from ._models import ComputeInstanceLastOperation # type: ignore
+ from ._models import ComputeInstanceProperties # type: ignore
+ from ._models import ComputeInstanceSshSettings # type: ignore
+ from ._models import ComputeNodesInformation # type: ignore
+ from ._models import ComputeResource # type: ignore
+ from ._models import ComputeSecrets # type: ignore
+ from ._models import DataFactory # type: ignore
+ from ._models import DataLakeAnalytics # type: ignore
+ from ._models import DataLakeAnalyticsProperties # type: ignore
+ from ._models import Databricks # type: ignore
+ from ._models import DatabricksComputeSecrets # type: ignore
+ from ._models import DatabricksProperties # type: ignore
+ from ._models import EncryptionProperty # type: ignore
+ from ._models import ErrorDetail # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import EstimatedVmPrice # type: ignore
+ from ._models import EstimatedVmPrices # type: ignore
+ from ._models import HdInsight # type: ignore
+ from ._models import HdInsightProperties # type: ignore
+ from ._models import Identity # type: ignore
+ from ._models import KeyVaultProperties # type: ignore
+ from ._models import ListAmlUserFeatureResult # type: ignore
+ from ._models import ListUsagesResult # type: ignore
+ from ._models import ListWorkspaceKeysResult # type: ignore
+ from ._models import ListWorkspaceQuotas # type: ignore
+ from ._models import MachineLearningServiceError # type: ignore
+ from ._models import NodeStateCounts # type: ignore
+ from ._models import NotebookListCredentialsResult # type: ignore
+ from ._models import NotebookPreparationError # type: ignore
+ from ._models import NotebookResourceInfo # type: ignore
+ from ._models import Operation # type: ignore
+ from ._models import OperationDisplay # type: ignore
+ from ._models import OperationListResult # type: ignore
+ from ._models import PaginatedComputeResourcesList # type: ignore
+ from ._models import PaginatedWorkspaceConnectionsList # type: ignore
+ from ._models import Password # type: ignore
+ from ._models import PersonalComputeInstanceSettings # type: ignore
+ from ._models import PrivateEndpoint # type: ignore
+ from ._models import PrivateEndpointConnection # type: ignore
+ from ._models import PrivateLinkResource # type: ignore
+ from ._models import PrivateLinkResourceListResult # type: ignore
+ from ._models import PrivateLinkServiceConnectionState # type: ignore
+ from ._models import QuotaBaseProperties # type: ignore
+ from ._models import QuotaUpdateParameters # type: ignore
+ from ._models import RegistryListCredentialsResult # type: ignore
+ from ._models import Resource # type: ignore
+ from ._models import ResourceId # type: ignore
+ from ._models import ResourceName # type: ignore
+ from ._models import ResourceQuota # type: ignore
+ from ._models import ResourceSkuLocationInfo # type: ignore
+ from ._models import ResourceSkuZoneDetails # type: ignore
+ from ._models import Restriction # type: ignore
+ from ._models import ScaleSettings # type: ignore
+ from ._models import ServicePrincipalCredentials # type: ignore
+ from ._models import SharedPrivateLinkResource # type: ignore
+ from ._models import Sku # type: ignore
+ from ._models import SkuCapability # type: ignore
+ from ._models import SkuListResult # type: ignore
+ from ._models import SkuSettings # type: ignore
+ from ._models import SslConfiguration # type: ignore
+ from ._models import SystemService # type: ignore
+ from ._models import UpdateWorkspaceQuotas # type: ignore
+ from ._models import UpdateWorkspaceQuotasResult # type: ignore
+ from ._models import Usage # type: ignore
+ from ._models import UsageName # type: ignore
+ from ._models import UserAccountCredentials # type: ignore
+ from ._models import VirtualMachine # type: ignore
+ from ._models import VirtualMachineProperties # type: ignore
+ from ._models import VirtualMachineSecrets # type: ignore
+ from ._models import VirtualMachineSize # type: ignore
+ from ._models import VirtualMachineSizeListResult # type: ignore
+ from ._models import VirtualMachineSshCredentials # type: ignore
+ from ._models import Workspace # type: ignore
+ from ._models import WorkspaceConnection # type: ignore
+ from ._models import WorkspaceConnectionDto # type: ignore
+ from ._models import WorkspaceListResult # type: ignore
+ from ._models import WorkspaceSku # type: ignore
+ from ._models import WorkspaceUpdateParameters # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+ AllocationState,
+ ApplicationSharingPolicy,
+ BillingCurrency,
+ ComputeInstanceAuthorizationType,
+ ComputeInstanceState,
+ ComputeType,
+ EncryptionStatus,
+ NodeState,
+ OperationName,
+ OperationStatus,
+ OsType,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProvisioningState,
+ QuotaUnit,
+ ReasonCode,
+ RemoteLoginPortPublicAccess,
+ ResourceIdentityType,
+ SshPublicAccess,
+ SslConfigurationStatus,
+ Status,
+ UnderlyingResourceAction,
+ UnitOfMeasure,
+ UsageUnit,
+ VmPriceOsType,
+ VmPriority,
+ VmTier,
+)
+
+__all__ = [
+ 'Aks',
+ 'AksComputeSecrets',
+ 'AksNetworkingConfiguration',
+ 'AksProperties',
+ 'AmlCompute',
+ 'AmlComputeNodeInformation',
+ 'AmlComputeNodesInformation',
+ 'AmlComputeProperties',
+ 'AmlUserFeature',
+ 'AssignedUser',
+ 'ClusterUpdateParameters',
+ 'ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties',
+ 'Compute',
+ 'ComputeInstance',
+ 'ComputeInstanceApplication',
+ 'ComputeInstanceConnectivityEndpoints',
+ 'ComputeInstanceCreatedBy',
+ 'ComputeInstanceLastOperation',
+ 'ComputeInstanceProperties',
+ 'ComputeInstanceSshSettings',
+ 'ComputeNodesInformation',
+ 'ComputeResource',
+ 'ComputeSecrets',
+ 'DataFactory',
+ 'DataLakeAnalytics',
+ 'DataLakeAnalyticsProperties',
+ 'Databricks',
+ 'DatabricksComputeSecrets',
+ 'DatabricksProperties',
+ 'EncryptionProperty',
+ 'ErrorDetail',
+ 'ErrorResponse',
+ 'EstimatedVmPrice',
+ 'EstimatedVmPrices',
+ 'HdInsight',
+ 'HdInsightProperties',
+ 'Identity',
+ 'KeyVaultProperties',
+ 'ListAmlUserFeatureResult',
+ 'ListUsagesResult',
+ 'ListWorkspaceKeysResult',
+ 'ListWorkspaceQuotas',
+ 'MachineLearningServiceError',
+ 'NodeStateCounts',
+ 'NotebookListCredentialsResult',
+ 'NotebookPreparationError',
+ 'NotebookResourceInfo',
+ 'Operation',
+ 'OperationDisplay',
+ 'OperationListResult',
+ 'PaginatedComputeResourcesList',
+ 'PaginatedWorkspaceConnectionsList',
+ 'Password',
+ 'PersonalComputeInstanceSettings',
+ 'PrivateEndpoint',
+ 'PrivateEndpointConnection',
+ 'PrivateLinkResource',
+ 'PrivateLinkResourceListResult',
+ 'PrivateLinkServiceConnectionState',
+ 'QuotaBaseProperties',
+ 'QuotaUpdateParameters',
+ 'RegistryListCredentialsResult',
+ 'Resource',
+ 'ResourceId',
+ 'ResourceName',
+ 'ResourceQuota',
+ 'ResourceSkuLocationInfo',
+ 'ResourceSkuZoneDetails',
+ 'Restriction',
+ 'ScaleSettings',
+ 'ServicePrincipalCredentials',
+ 'SharedPrivateLinkResource',
+ 'Sku',
+ 'SkuCapability',
+ 'SkuListResult',
+ 'SkuSettings',
+ 'SslConfiguration',
+ 'SystemService',
+ 'UpdateWorkspaceQuotas',
+ 'UpdateWorkspaceQuotasResult',
+ 'Usage',
+ 'UsageName',
+ 'UserAccountCredentials',
+ 'VirtualMachine',
+ 'VirtualMachineProperties',
+ 'VirtualMachineSecrets',
+ 'VirtualMachineSize',
+ 'VirtualMachineSizeListResult',
+ 'VirtualMachineSshCredentials',
+ 'Workspace',
+ 'WorkspaceConnection',
+ 'WorkspaceConnectionDto',
+ 'WorkspaceListResult',
+ 'WorkspaceSku',
+ 'WorkspaceUpdateParameters',
+ 'AllocationState',
+ 'ApplicationSharingPolicy',
+ 'BillingCurrency',
+ 'ComputeInstanceAuthorizationType',
+ 'ComputeInstanceState',
+ 'ComputeType',
+ 'EncryptionStatus',
+ 'NodeState',
+ 'OperationName',
+ 'OperationStatus',
+ 'OsType',
+ 'PrivateEndpointConnectionProvisioningState',
+ 'PrivateEndpointServiceConnectionStatus',
+ 'ProvisioningState',
+ 'QuotaUnit',
+ 'ReasonCode',
+ 'RemoteLoginPortPublicAccess',
+ 'ResourceIdentityType',
+ 'SshPublicAccess',
+ 'SslConfigurationStatus',
+ 'Status',
+ 'UnderlyingResourceAction',
+ 'UnitOfMeasure',
+ 'UsageUnit',
+ 'VmPriceOsType',
+ 'VmPriority',
+ 'VmTier',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000000..8cfaec67bd3
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,279 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+ def __getitem__(self, name):
+ return super().__getitem__(name.upper())
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+ """
+ try:
+ return cls._member_map_[name.upper()]
+ except KeyError:
+ raise AttributeError(name)
+
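+ # Illustrative note (not part of the generated code): the metaclass above makes
+ # member lookup case-insensitive for every enum defined below, e.g.
+ #
+ #     ProvisioningState["succeeded"] is ProvisioningState.SUCCEEDED   # True
+ #     AllocationState.STEADY == "Steady"                              # True (str-based enum)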
+
+class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Allocation state of the compute. Possible values are: steady - Indicates that the compute is
+ not resizing. There are no changes to the number of compute nodes in the compute in progress. A
+ compute enters this state when it is created and when no operations are being performed on the
+ compute to change the number of compute nodes. resizing - Indicates that the compute is
+ resizing; that is, compute nodes are being added to or removed from the compute.
+ """
+
+ STEADY = "Steady"
+ RESIZING = "Resizing"
+
+class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Policy for sharing applications on this compute instance among users of parent workspace. If
+ Personal, only the creator can access applications on this compute instance. When Shared, any
+ workspace user can access applications on this instance depending on his/her assigned role.
+ """
+
+ PERSONAL = "Personal"
+ SHARED = "Shared"
+
+class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Three lettered code specifying the currency of the VM price. Example: USD
+ """
+
+ USD = "USD"
+
+class ComputeInstanceAuthorizationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The Compute Instance Authorization type. Available values are personal (default).
+ """
+
+ PERSONAL = "personal"
+
+class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Current state of a ComputeInstance.
+ """
+
+ CREATING = "Creating"
+ CREATE_FAILED = "CreateFailed"
+ DELETING = "Deleting"
+ RUNNING = "Running"
+ RESTARTING = "Restarting"
+ JOB_RUNNING = "JobRunning"
+ SETTING_UP = "SettingUp"
+ SETUP_FAILED = "SetupFailed"
+ STARTING = "Starting"
+ STOPPED = "Stopped"
+ STOPPING = "Stopping"
+ USER_SETTING_UP = "UserSettingUp"
+ USER_SETUP_FAILED = "UserSetupFailed"
+ UNKNOWN = "Unknown"
+ UNUSABLE = "Unusable"
+
+class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of compute
+ """
+
+ AKS = "AKS"
+ AML_COMPUTE = "AmlCompute"
+ COMPUTE_INSTANCE = "ComputeInstance"
+ DATA_FACTORY = "DataFactory"
+ VIRTUAL_MACHINE = "VirtualMachine"
+ HD_INSIGHT = "HDInsight"
+ DATABRICKS = "Databricks"
+ DATA_LAKE_ANALYTICS = "DataLakeAnalytics"
+
+class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Indicates whether or not the encryption is enabled for the workspace.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the compute node. Values are idle, running, preparing, unusable, leaving and
+ preempted.
+ """
+
+ IDLE = "idle"
+ RUNNING = "running"
+ PREPARING = "preparing"
+ UNUSABLE = "unusable"
+ LEAVING = "leaving"
+ PREEMPTED = "preempted"
+
+class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Name of the last operation.
+ """
+
+ CREATE = "Create"
+ START = "Start"
+ STOP = "Stop"
+ RESTART = "Restart"
+ REIMAGE = "Reimage"
+ DELETE = "Delete"
+
+class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operation status.
+ """
+
+ IN_PROGRESS = "InProgress"
+ SUCCEEDED = "Succeeded"
+ CREATE_FAILED = "CreateFailed"
+ START_FAILED = "StartFailed"
+ STOP_FAILED = "StopFailed"
+ RESTART_FAILED = "RestartFailed"
+ REIMAGE_FAILED = "ReimageFailed"
+ DELETE_FAILED = "DeleteFailed"
+
+class OsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Compute OS Type
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current provisioning state.
+ """
+
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ FAILED = "Failed"
+
+class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The private endpoint connection status.
+ """
+
+ PENDING = "Pending"
+ APPROVED = "Approved"
+ REJECTED = "Rejected"
+ DISCONNECTED = "Disconnected"
+ TIMEOUT = "Timeout"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current deployment state of workspace resource. The provisioningState is to indicate states
+ for resource provisioning.
+ """
+
+ UNKNOWN = "Unknown"
+ UPDATING = "Updating"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of quota measurement.
+ """
+
+ COUNT = "Count"
+
+class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The reason for the restriction.
+ """
+
+ NOT_SPECIFIED = "NotSpecified"
+ NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion"
+ NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
+
+class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is
+ open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed
+ on all nodes of the cluster if VNet is defined, else is open on all public nodes. It can be
+ default only during cluster creation time; after creation it will be either enabled or
+ disabled.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+ NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The identity type.
+ """
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ NONE = "None"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on this instance. Enabled - Indicates that the public ssh port is open and
+ accessible according to the VNet/subnet policy if applicable.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enable or disable ssl for scoring
+ """
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+ AUTO = "Auto"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Status of update workspace quota.
+ """
+
+ UNDEFINED = "Undefined"
+ SUCCESS = "Success"
+ FAILURE = "Failure"
+ INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+ INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+ INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+ OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+ OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
+class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ DELETE = "Delete"
+ DETACH = "Detach"
+
+class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The unit of time measurement for the specified VM price. Example: OneHour
+ """
+
+ ONE_HOUR = "OneHour"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of usage measurement.
+ """
+
+ COUNT = "Count"
+
+class VmPriceOsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operating system type used by the VM.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Virtual Machine priority
+ """
+
+ DEDICATED = "Dedicated"
+ LOW_PRIORITY = "LowPriority"
+
+class VmTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the VM.
+ """
+
+ STANDARD = "Standard"
+ LOW_PRIORITY = "LowPriority"
+ SPOT = "Spot"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..5e0fa59882d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,3721 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = kwargs.get('compute_location', None)
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Aks, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = kwargs.get('properties', None)
+
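+ # Illustrative note (not part of the generated code): Compute._subtype_map drives
+ # polymorphic deserialization, so a payload whose "computeType" discriminator is
+ # "AKS" is materialized as the Aks subclass above rather than as the Compute base.
+ # Constructing the subclass directly pins the discriminator as well:
+ #
+ #     aks = Aks(description="attached AKS cluster")
+ #     aks.compute_type          # "AKS"
+ #     isinstance(aks, Compute)  # True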
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advance configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = kwargs.get('subnet_id', None)
+ self.service_cidr = kwargs.get('service_cidr', None)
+ self.dns_service_ip = kwargs.get('dns_service_ip', None)
+ self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None)
+
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = kwargs.get('cluster_fqdn', None)
+ self.system_services = None
+ self.agent_count = kwargs.get('agent_count', None)
+ self.agent_vm_size = kwargs.get('agent_vm_size', None)
+ self.ssl_configuration = kwargs.get('ssl_configuration', None)
+ self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None)
+
+
+class AmlCompute(Compute):
+ """An Azure Machine Learning compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: AML Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+ :ivar run_id: ID of the Experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+ """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlComputeNodesInformation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ 'nodes': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+ else is open on all public nodes. It can be left at the default only at cluster creation time;
+ after creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = kwargs.get('os_type', "Linux")
+ self.vm_size = kwargs.get('vm_size', None)
+ self.vm_priority = kwargs.get('vm_priority', None)
+ self.scale_settings = kwargs.get('scale_settings', None)
+ self.user_account_credentials = kwargs.get('user_account_credentials', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified")
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+
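+# Usage sketch (illustrative, not generated code): building the payload for an
+# AML compute cluster from the models above. The VM size and priority values are
+# placeholder assumptions.
+#
+#   props = AmlComputeProperties(vm_size="STANDARD_DS3_V2", vm_priority="Dedicated")
+#   cluster = AmlCompute(description="training cluster", properties=props)
+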
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.display_name = kwargs.get('display_name', None)
+ self.description = kwargs.get('description', None)
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = kwargs['object_id']
+ self.tenant_id = kwargs['tenant_id']
+
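+# Usage sketch (illustrative only): both identifiers are required, so construction
+# raises KeyError if either keyword is omitted. The GUIDs below are placeholders.
+#
+#   user = AssignedUser(
+#       object_id="00000000-0000-0000-0000-000000000000",
+#       tenant_id="00000000-0000-0000-0000-000000000000",
+#   )
+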
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param scale_settings: Desired scale settings for the amlCompute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.scale_settings = kwargs.get('scale_settings', None)
+
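+# Usage sketch (illustrative only): the update payload used when resizing an
+# existing AmlCompute cluster. ScaleSettings is defined elsewhere in this module
+# and is assumed here to accept a max_node_count keyword; the value is a placeholder.
+#
+#   update = ClusterUpdateParameters(scale_settings=ScaleSettings(max_node_count=4))
+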
+
+class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model):
+ """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal id of user assigned identity.
+ :vartype principal_id: str
+ :ivar client_id: The client id of user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs)
+ self.principal_id = None
+ self.client_id = None
+
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(**kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = kwargs.get('display_name', None)
+ self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for a ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = kwargs.get('operation_name', None)
+ self.operation_time = kwargs.get('operation_time', None)
+ self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = kwargs.get('vm_size', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
+ self.ssh_settings = kwargs.get('ssh_settings', None)
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
+ self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled")
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = kwargs.get('admin_public_key', None)
+
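+# Usage sketch (illustrative only): enabling SSH on a compute instance. The public
+# key string and VM size are placeholders; read-only fields such as admin_user_name
+# and ssh_port are populated by the service and ignored on requests.
+#
+#   ssh = ComputeInstanceSshSettings(ssh_public_access="Enabled",
+#                                    admin_public_key="ssh-rsa AAAA... user@host")
+#   instance = ComputeInstance(properties=ComputeInstanceProperties(
+#       vm_size="STANDARD_DS3_V2", ssh_settings=ssh))
+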
+
+class Resource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.type = None
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
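+# Usage sketch (illustrative only): wrapping a compute definition in the ARM
+# resource envelope expected by the compute create/update operations. The location
+# and VM size are placeholder values.
+#
+#   resource = ComputeResource(
+#       location="eastus",
+#       properties=AmlCompute(properties=AmlComputeProperties(vm_size="STANDARD_DS3_V2")))
+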
+
+class Databricks(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
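+# Usage sketch (illustrative only): attaching an existing Databricks workspace as
+# compute. The ARM resource id and access token are placeholders.
+#
+#   databricks = Databricks(
+#       resource_id="<databricks-workspace-arm-id>",
+#       properties=DatabricksProperties(databricks_access_token="<access-token>"))
+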
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(**kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(**kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None)
+
+
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = kwargs['status']
+ self.key_vault_properties = kwargs['key_vault_properties']
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """Error detail information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = kwargs['code']
+ self.message = kwargs['message']
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Error response information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.details = None
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = kwargs['retail_price']
+ self.os_type = kwargs['os_type']
+ self.vm_tier = kwargs['vm_tier']
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = kwargs['billing_currency']
+ self.unit_of_measure = kwargs['unit_of_measure']
+ self.values = kwargs['values']
+
+
+class HdInsight(Compute):
+ """A HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(**kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HdInsightProperties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: Required. The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The list of user identities associated with the resource. The user
+ identity dictionary key references will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = kwargs['type']
+ self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
+
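+# Usage sketch (illustrative only): a system-assigned identity, and one that also
+# carries a user-assigned identity keyed by its ARM resource id (placeholder value).
+# The dictionary values are read-only on requests, so empty models suffice.
+#
+#   identity = Identity(type="SystemAssigned")
+#   identity = Identity(
+#       type="SystemAssigned,UserAssigned",
+#       user_assigned_identities={
+#           "<user-assigned-identity-arm-id>":
+#               ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(),
+#       })
+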
+
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer-owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = kwargs['key_vault_arm_id']
+ self.key_identifier = kwargs['key_identifier']
+ self.identity_client_id = kwargs.get('identity_client_id', None)
+
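+# Usage sketch (illustrative only): customer-managed key encryption settings for a
+# workspace, combining EncryptionProperty (defined above) with KeyVaultProperties.
+# The vault and key identifiers are placeholders.
+#
+#   encryption = EncryptionProperty(
+#       status="Enabled",
+#       key_vault_properties=KeyVaultProperties(
+#           key_vault_arm_id="<key-vault-arm-id>",
+#           key_identifier="https://<vault-name>.vault.azure.net/keys/<key>/<version>"))
+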
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :param notebook_access_keys:
+ :type notebook_access_keys:
+ ~azure_machine_learning_workspaces.models.NotebookListCredentialsResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = kwargs.get('notebook_access_keys', None)
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+ """Wrapper for error response to follow ARM guidelines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MachineLearningServiceError, self).__init__(**kwargs)
+ self.error = None
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NotebookListCredentialsResult(msrest.serialization.Model):
+ """NotebookListCredentialsResult.
+
+ :param primary_access_key:
+ :type primary_access_key: str
+ :param secondary_access_key:
+ :type secondary_access_key: str
+ """
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookListCredentialsResult, self).__init__(**kwargs)
+ self.primary_access_key = kwargs.get('primary_access_key', None)
+ self.secondary_access_key = kwargs.get('secondary_access_key', None)
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = kwargs.get('error_message', None)
+ self.status_code = kwargs.get('status_code', None)
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+ :param resource_id: The data plane resourceId that is used to initialize the notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurs when preparing the notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = kwargs.get('fqdn', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None)
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.display = kwargs.get('display', None)
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = kwargs.get('provider', None)
+ self.resource = kwargs.get('resource', None)
+ self.operation = kwargs.get('operation', None)
+ self.description = kwargs.get('description', None)
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = kwargs.get('assigned_user', None)
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+
+
+class PrivateEndpointConnection(msrest.serialization.Model):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the private endpoint connection.
+ :vartype id: str
+ :ivar name: Friendly name of the private endpoint connection.
+ :vartype name: str
+ :ivar type: Resource type of private endpoint connection.
+ :vartype type: str
+ :param private_endpoint: The resource of private end point.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.private_endpoint = kwargs.get('private_endpoint', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+ self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :param required_zone_names: The private link resource Private link DNS zone name.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = kwargs.get('required_zone_names', None)
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.description = kwargs.get('description', None)
+ self.actions_required = kwargs.get('actions_required', None)
+
+
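+# Illustrative usage sketch (not part of the generated model code): the payload
+# used when approving or rejecting a private endpoint connection. ``status`` is
+# serialized as a plain string, so either the literal string or the
+# PrivateEndpointServiceConnectionStatus enum value can be passed.
+#
+#     state = PrivateLinkServiceConnectionState(
+#         status="Approved",
+#         description="Approved by the workspace administrator",
+#     )
+#     connection = PrivateEndpointConnection(private_link_service_connection_state=state)
+
+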
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.type = kwargs.get('type', None)
+ self.limit = kwargs.get('limit', None)
+ self.unit = kwargs.get('unit', None)
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
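+# Illustrative usage sketch: building a quota update request body. Each
+# QuotaBaseProperties entry targets a VM-family quota resource; ``limit`` is the
+# new maximum and ``unit`` is currently always "Count". The resource ``id`` and
+# ``type`` strings below are hypothetical placeholders, not values taken from
+# this API.
+#
+#     quota = QuotaBaseProperties(
+#         id="/subscriptions/<sub-id>/providers/Microsoft.MachineLearningServices"
+#            "/locations/<region>/quotas/<vm-family>",
+#         type="<quota-resource-type>",
+#         limit=100,
+#         unit="Count",
+#     )
+#     update = QuotaUpdateParameters(value=[quota])
+
+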
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = kwargs.get('passwords', None)
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
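+# Illustrative usage sketch: ``id`` is required and is read with ``kwargs['id']``,
+# so constructing a ResourceId without it raises KeyError. A typical use, per the
+# docstring above, is referencing an existing subnet (placeholder values):
+#
+#     subnet = ResourceId(
+#         id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers"
+#            "/Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>",
+#     )
+
+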
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The type of restrictions. As of now, the only possible value is location.
+ :vartype type: str
+ :ivar values: The value of restrictions. If the restriction type is set to location, this would
+ be the different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = kwargs.get('reason_code', None)
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+ :param node_idle_time_before_scale_down: Node idle time before scaling down amlCompute. This
+ string needs to be in ISO 8601 duration format (for example, "PT5M").
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = kwargs['max_node_count']
+ self.min_node_count = kwargs.get('min_node_count', 0)
+ self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
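+# Illustrative usage sketch: scale settings for an autoscaling AmlCompute
+# cluster. ``max_node_count`` is required (read via ``kwargs['max_node_count']``),
+# ``min_node_count`` defaults to 0, and the idle timeout is a datetime.timedelta
+# that msrest serializes as an ISO 8601 duration (e.g. "PT5M").
+#
+#     import datetime
+#
+#     scale = ScaleSettings(
+#         max_node_count=4,
+#         min_node_count=0,
+#         node_idle_time_before_scale_down=datetime.timedelta(minutes=5),
+#     )
+
+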
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = kwargs['client_id']
+ self.client_secret = kwargs['client_secret']
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that the private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.private_link_resource_id = kwargs.get('private_link_resource_id', None)
+ self.group_id = kwargs.get('group_id', None)
+ self.request_message = kwargs.get('request_message', None)
+ self.status = kwargs.get('status', None)
+
+
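+# Illustrative usage sketch: a shared private link resource for a workspace.
+# Most attributes map to flattened ``properties.*`` keys, so serialize()
+# (inherited from msrest.serialization.Model) nests them under a ``properties``
+# object in the request body. Values below are placeholders.
+#
+#     link = SharedPrivateLinkResource(
+#         name="mydblink",
+#         private_link_resource_id="<arm-id-of-the-target-resource>",
+#         group_id="<group-id>",
+#         request_message="Requesting approval",
+#         status="Pending",
+#     )
+#     body = link.serialize()   # {'name': 'mydblink', 'properties': {...}}
+
+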
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.tier = kwargs.get('tier', None)
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class SkuSettings(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations in which the SKU is available. These will be supported and
+ registered Azure Geo Regions (e.g. West US, East US, Southeast Asia).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions due to which the SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuSettings, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = kwargs.get('restrictions', None)
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.cert = kwargs.get('cert', None)
+ self.key = kwargs.get('key', None)
+ self.cname = kwargs.get('cname', None)
+
+
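+# Illustrative usage sketch: enabling SSL for a scoring endpoint with a
+# caller-supplied certificate. The cert/key strings are placeholders for
+# PEM-encoded contents.
+#
+#     ssl = SslConfiguration(
+#         status="Enabled",
+#         cert="<pem-encoded certificate>",
+#         key="<pem-encoded private key>",
+#         cname="scoring.contoso.com",
+#     )
+
+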
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = kwargs.get('limit', None)
+ self.unit = None
+ self.status = kwargs.get('status', None)
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = kwargs['admin_user_name']
+ self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+ self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
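+# Illustrative usage sketch: the admin account created on every node of an
+# AmlCompute cluster. Only ``admin_user_name`` is required; supply either an SSH
+# public key or a password. Values are placeholders.
+#
+#     creds = UserAccountCredentials(
+#         admin_user_name="azureuser",
+#         admin_user_ssh_public_key="ssh-rsa AAAA... user@host",
+#     )
+
+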
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ :param supported_compute_types: Specifies the compute types supported by the virtual machine
+ size.
+ :type supported_compute_types: list[str]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None)
+ self.supported_compute_types = kwargs.get('supported_compute_types', None)
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.aml_compute = kwargs.get('aml_compute', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.public_key_data = kwargs.get('public_key_data', None)
+ self.private_key_data = kwargs.get('private_key_data', None)
+
+
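+# Illustrative usage sketch: attaching an existing VM as a compute target. The
+# SSH credentials feed VirtualMachineProperties, which in turn goes into a
+# VirtualMachine whose ``compute_type`` is fixed to "VirtualMachine" by its
+# constructor. All values are placeholders.
+#
+#     ssh = VirtualMachineSshCredentials(username="azureuser", password="<password>")
+#     props = VirtualMachineProperties(
+#         virtual_machine_size="STANDARD_D2_V2",
+#         ssh_port=22,
+#         address="10.0.0.4",
+#         administrator_account=ssh,
+#     )
+#     vm = VirtualMachine(
+#         resource_id="<arm-id-of-the-existing-vm>",
+#         properties=props,
+#     )
+
+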
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format.
+ :vartype creation_time: ~datetime.datetime
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of the workspace resource. The
+ provisioningState indicates the state of resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by the
+ workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'creation_time': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.workspace_id = None
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.creation_time = None
+ self.key_vault = kwargs.get('key_vault', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.discovery_url = kwargs.get('discovery_url', None)
+ self.provisioning_state = None
+ self.encryption = kwargs.get('encryption', None)
+ self.hbi_workspace = kwargs.get('hbi_workspace', False)
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False)
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None)
+ self.notebook_info = None
+
+
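+# Illustrative usage sketch: a minimal workspace create body, assuming the
+# Resource base class accepts ``location`` and ``sku`` keyword arguments (its
+# documented :param entries suggest it does). Dependent resource ARM ids are
+# placeholders; flattened ``properties.*`` keys are nested under ``properties``
+# when the model is serialized.
+#
+#     ws = Workspace(
+#         location="eastus",
+#         sku=Sku(name="Basic", tier="Basic"),
+#         friendly_name="My workspace",
+#         key_vault="<arm-id-of-key-vault>",
+#         storage_account="<arm-id-of-storage-account>",
+#     )
+
+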
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+
+
+class WorkspaceConnectionDto(msrest.serialization.Model):
+ """object used for creating workspace connection.
+
+ :param name: Friendly name of the workspace connection.
+ :type name: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnectionDto, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+
+
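+# Illustrative sketch (not part of the generated code): a WorkspaceConnectionDto is
+# built with keyword arguments and passed to the workspace-connection create
+# operation; the values below are placeholders, not taken from this repository.
+#
+#     connection = WorkspaceConnectionDto(
+#         name="my-connection",
+#         category="PythonFeed",
+#         target="https://pypi.org",
+#         auth_type="PAT",
+#         value="<personal-access-token>",
+#     )
+
+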
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """AML workspace sku information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar skus: The list of workspace sku settings.
+ :vartype skus: list[~azure_machine_learning_workspaces.models.SkuSettings]
+ """
+
+ _validation = {
+ 'resource_type': {'readonly': True},
+ 'skus': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'skus': {'key': 'skus', 'type': '[SkuSettings]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.resource_type = None
+ self.skus = None
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
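+
+
+# Illustrative sketch (not part of the generated code): WorkspaceUpdateParameters
+# carries the mutable workspace fields used by the update operation; placeholder
+# values only.
+#
+#     update = WorkspaceUpdateParameters(
+#         tags={"env": "test"},
+#         description="new description",
+#         friendly_name="New friendly name",
+#     )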
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
new file mode 100644
index 00000000000..0eaa2638e0e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
@@ -0,0 +1,4007 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["AksProperties"] = None,
+ **kwargs
+ ):
+ super(Aks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = properties
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_kube_config: Optional[str] = None,
+ admin_kube_config: Optional[str] = None,
+ image_pull_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = user_kube_config
+ self.admin_kube_config = admin_kube_config
+ self.image_pull_secret_name = image_pull_secret_name
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+    """Advanced configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ subnet_id: Optional[str] = None,
+ service_cidr: Optional[str] = None,
+ dns_service_ip: Optional[str] = None,
+ docker_bridge_cidr: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = subnet_id
+ self.service_cidr = service_cidr
+ self.dns_service_ip = dns_service_ip
+ self.docker_bridge_cidr = docker_bridge_cidr
+
+
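+# Illustrative sketch (not part of the generated code): advanced networking settings
+# for an AKS compute target. The subnet ID and CIDR ranges below are placeholders;
+# service_cidr and docker_bridge_cidr must not overlap the subnet ranges, and
+# dns_service_ip must fall inside service_cidr.
+#
+#     aks_networking = AksNetworkingConfiguration(
+#         subnet_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>",
+#         service_cidr="10.0.0.0/16",
+#         dns_service_ip="10.0.0.10",
+#         docker_bridge_cidr="172.17.0.1/16",
+#     )
+
+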
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cluster_fqdn: Optional[str] = None,
+ agent_count: Optional[int] = None,
+ agent_vm_size: Optional[str] = None,
+ ssl_configuration: Optional["SslConfiguration"] = None,
+ aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = cluster_fqdn
+ self.system_services = None
+ self.agent_count = agent_count
+ self.agent_vm_size = agent_vm_size
+ self.ssl_configuration = ssl_configuration
+ self.aks_networking_configuration = aks_networking_configuration
+
+
+class AmlCompute(Compute):
+ """An Azure Machine Learning compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: AML Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["AmlComputeProperties"] = None,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = properties
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+    """Compute node information related to an AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+    :ivar run_id: ID of the Experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+ """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlComputeNodesInformation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+    """Compute nodes information related to an AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ 'nodes': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+      otherwise it is open on all public nodes. It can be NotSpecified only during cluster creation
+      time; after creation it will be either Enabled or Disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ }
+
+ def __init__(
+ self,
+ *,
+ os_type: Optional[Union[str, "OsType"]] = "Linux",
+ vm_size: Optional[str] = None,
+ vm_priority: Optional[Union[str, "VmPriority"]] = None,
+ scale_settings: Optional["ScaleSettings"] = None,
+ user_account_credentials: Optional["UserAccountCredentials"] = None,
+ subnet: Optional["ResourceId"] = None,
+ remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified",
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = os_type
+ self.vm_size = vm_size
+ self.vm_priority = vm_priority
+ self.scale_settings = scale_settings
+ self.user_account_credentials = user_account_credentials
+ self.subnet = subnet
+ self.remote_login_port_public_access = remote_login_port_public_access
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+
+
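+# Illustrative sketch (not part of the generated code): minimal properties for a
+# managed AmlCompute cluster. ScaleSettings is defined elsewhere in this module and
+# its keyword names shown here are assumptions; the other values are placeholders.
+#
+#     aml_props = AmlComputeProperties(
+#         vm_size="STANDARD_DS3_V2",
+#         vm_priority="Dedicated",
+#         scale_settings=ScaleSettings(max_node_count=4, min_node_count=0),
+#         remote_login_port_public_access="Disabled",
+#     )
+
+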
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ display_name: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = id
+ self.display_name = display_name
+ self.description = description
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ object_id: str,
+ tenant_id: str,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = object_id
+ self.tenant_id = tenant_id
+
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param scale_settings: Desired scale settings for the amlCompute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scale_settings: Optional["ScaleSettings"] = None,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.scale_settings = scale_settings
+
+
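+# Illustrative sketch (not part of the generated code): resizing an existing
+# AmlCompute sends only the new scale settings. ScaleSettings is defined elsewhere
+# in this module and its keyword names shown here are assumptions.
+#
+#     update = ClusterUpdateParameters(
+#         scale_settings=ScaleSettings(max_node_count=8, min_node_count=0),
+#     )
+
+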
+class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model):
+ """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal id of user assigned identity.
+ :vartype principal_id: str
+ :ivar client_id: The client id of user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs)
+ self.principal_id = None
+ self.client_id = None
+
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["ComputeInstanceProperties"] = None,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = properties
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+    :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for a ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+    :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ operation_name: Optional[Union[str, "OperationName"]] = None,
+ operation_time: Optional[datetime.datetime] = None,
+ operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = operation_name
+ self.operation_time = operation_time
+ self.operation_status = operation_status
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vm_size: Optional[str] = None,
+ subnet: Optional["ResourceId"] = None,
+ application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+ compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal",
+ personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled",
+ admin_public_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = ssh_public_access
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = admin_public_key
+
+
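+# Illustrative sketch (not part of the generated code): compute-instance properties
+# with SSH enabled. The public key string is a placeholder; generate a key pair with
+# "ssh-keygen -t rsa -b 2048" as noted in the docstring above.
+#
+#     ci_props = ComputeInstanceProperties(
+#         vm_size="STANDARD_DS3_V2",
+#         application_sharing_policy="Personal",
+#         ssh_settings=ComputeInstanceSshSettings(
+#             ssh_public_access="Enabled",
+#             admin_public_key="ssh-rsa AAAA... user@host",
+#         ),
+#     )
+
+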
+class Resource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.identity = identity
+ self.location = location
+ self.type = None
+ self.tags = tags
+ self.sku = sku
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.properties = properties
+
+
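+# Illustrative sketch (not part of the generated code): compute create and attach
+# payloads wrap a Compute subclass in a ComputeResource envelope; placeholder
+# values only.
+#
+#     compute_payload = ComputeResource(
+#         location="eastus2euap",
+#         properties=AmlCompute(
+#             properties=AmlComputeProperties(vm_size="STANDARD_DS3_V2"),
+#         ),
+#     )
+
+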
+class Databricks(Compute):
+    """A Databricks compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["DatabricksProperties"] = None,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = properties
+
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: access token for databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = databricks_access_token
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = databricks_access_token
+
+
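+# Illustrative sketch (not part of the generated client): shows how a Databricks
+# compute attachment could be described with these models. The access token and
+# description below are placeholders, not real values.
+def _example_databricks_payload():  # pragma: no cover - illustrative only
+    props = DatabricksProperties(databricks_access_token="<databricks-access-token>")
+    return Databricks(description="Attached Databricks workspace", properties=props)
+
+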
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["DataLakeAnalyticsProperties"] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = properties
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_lake_store_account_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = data_lake_store_account_name
+
+
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Union[str, "EncryptionStatus"],
+ key_vault_properties: "KeyVaultProperties",
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = status
+ self.key_vault_properties = key_vault_properties
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """Error detail information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code: str,
+ message: str,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = code
+ self.message = message
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Error response information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.details = None
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The tier of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ retail_price: float,
+ os_type: Union[str, "VmPriceOsType"],
+ vm_tier: Union[str, "VmTier"],
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = retail_price
+ self.os_type = os_type
+ self.vm_tier = vm_tier
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ billing_currency: Union[str, "BillingCurrency"],
+ unit_of_measure: Union[str, "UnitOfMeasure"],
+ values: List["EstimatedVmPrice"],
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = billing_currency
+ self.unit_of_measure = unit_of_measure
+ self.values = values
+
+
+class HdInsight(Compute):
+ """A HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["HdInsightProperties"] = None,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = properties
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HdInsightProperties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: Required. The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The list of user identities associated with the resource. The
+ user identity dictionary key references will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Union[str, "ResourceIdentityType"],
+ user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
+
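+# Illustrative sketch (not part of the generated client): a system-assigned
+# identity needs only the type; user-assigned identities would be keyed by their
+# ARM resource ids. All names below are placeholders.
+def _example_identity():  # pragma: no cover - illustrative only
+    return Identity(
+        type="SystemAssigned",
+        # For user-assigned identities, pass a dict keyed by the identity ARM id:
+        # user_assigned_identities={"/subscriptions/<sub>/resourceGroups/<rg>/providers/"
+        #                           "Microsoft.ManagedIdentity/userAssignedIdentities/<name>": ...}
+    )
+
+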
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - the client id of the identity that will be used to
+ access the key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ key_vault_arm_id: str,
+ key_identifier: str,
+ identity_client_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = key_vault_arm_id
+ self.key_identifier = key_identifier
+ self.identity_client_id = identity_client_id
+
+
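+# Illustrative sketch (not part of the generated client): customer-managed key
+# encryption pairs EncryptionProperty with KeyVaultProperties. The key vault ARM
+# id and key identifier below are placeholders, not real resources.
+def _example_encryption():  # pragma: no cover - illustrative only
+    kv = KeyVaultProperties(
+        key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
+        key_identifier="https://<kv>.vault.azure.net/keys/<key>/<version>",
+    )
+    return EncryptionProperty(status="Enabled", key_vault_properties=kv)
+
+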
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :param notebook_access_keys:
+ :type notebook_access_keys:
+ ~azure_machine_learning_workspaces.models.NotebookListCredentialsResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'},
+ }
+
+ def __init__(
+ self,
+ *,
+ notebook_access_keys: Optional["NotebookListCredentialsResult"] = None,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = notebook_access_keys
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+ """Wrapper for error response to follow ARM guidelines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MachineLearningServiceError, self).__init__(**kwargs)
+ self.error = None
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NotebookListCredentialsResult(msrest.serialization.Model):
+ """NotebookListCredentialsResult.
+
+ :param primary_access_key:
+ :type primary_access_key: str
+ :param secondary_access_key:
+ :type secondary_access_key: str
+ """
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ primary_access_key: Optional[str] = None,
+ secondary_access_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(NotebookListCredentialsResult, self).__init__(**kwargs)
+ self.primary_access_key = primary_access_key
+ self.secondary_access_key = secondary_access_key
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error_message: Optional[str] = None,
+ status_code: Optional[int] = None,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = error_message
+ self.status_code = status_code
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+ :param resource_id: The data plane resourceId used to initialize the notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurred while preparing the notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ fqdn: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ notebook_preparation_error: Optional["NotebookPreparationError"] = None,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = fqdn
+ self.resource_id = resource_id
+ self.notebook_preparation_error = notebook_preparation_error
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ display: Optional["OperationDisplay"] = None,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = name
+ self.display = display
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ provider: Optional[str] = None,
+ resource: Optional[str] = None,
+ operation: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = provider
+ self.resource = resource
+ self.operation = operation
+ self.description = description
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Operation"]] = None,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ComputeResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceConnection"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ *,
+ assigned_user: Optional["AssignedUser"] = None,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = assigned_user
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+
+
+class PrivateEndpointConnection(msrest.serialization.Model):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the private endpoint connection.
+ :vartype id: str
+ :ivar name: Friendly name of the private endpoint connection.
+ :vartype name: str
+ :ivar type: Resource type of private endpoint connection.
+ :vartype type: str
+ :param private_endpoint: The resource of private end point.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ private_endpoint: Optional["PrivateEndpoint"] = None,
+ private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.private_endpoint = private_endpoint
+ self.private_link_service_connection_state = private_link_service_connection_state
+ self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :param required_zone_names: The private link resource Private link DNS zone name.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ required_zone_names: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = required_zone_names
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["PrivateLinkResource"]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ description: Optional[str] = None,
+ actions_required: Optional[str] = None,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = status
+ self.description = description
+ self.actions_required = actions_required
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ type: Optional[str] = None,
+ limit: Optional[int] = None,
+ unit: Optional[Union[str, "QuotaUnit"]] = None,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = id
+ self.type = type
+ self.limit = limit
+ self.unit = unit
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["QuotaBaseProperties"]] = None,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = value
+
+
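+# Illustrative sketch (not part of the generated client): a quota update sets a
+# per-VM-family limit. The resource id, type, and limit below are placeholders.
+def _example_quota_update():  # pragma: no cover - illustrative only
+    prop = QuotaBaseProperties(
+        id="/subscriptions/<sub>/providers/Microsoft.MachineLearningServices/locations/<loc>/quotas/<vmFamily>",
+        type="Microsoft.MachineLearningServices/workspaces/quotas",
+        limit=100,
+        unit="Count",
+    )
+    return QuotaUpdateParameters(value=[prop])
+
+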
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ passwords: Optional[List["Password"]] = None,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = passwords
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = id
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The type of restriction. Currently the only possible value is location.
+ :vartype type: str
+ :ivar values: The values of the restriction. If the restriction type is location, these are the
+ locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ reason_code: Optional[Union[str, "ReasonCode"]] = None,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = reason_code
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+ :param node_idle_time_before_scale_down: Node idle time before scaling down the amlCompute.
+ This needs to be in ISO 8601 duration format (for example, PT15M).
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_node_count: int,
+ min_node_count: Optional[int] = 0,
+ node_idle_time_before_scale_down: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = max_node_count
+ self.min_node_count = min_node_count
+ self.node_idle_time_before_scale_down = node_idle_time_before_scale_down
+
+
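+# Illustrative sketch (not part of the generated client): autoscale between 0 and
+# 4 nodes and scale idle nodes down after 15 minutes. The node counts and idle
+# time are example values only.
+def _example_scale_settings():  # pragma: no cover - illustrative only
+    import datetime
+    return ScaleSettings(
+        max_node_count=4,
+        min_node_count=0,
+        node_idle_time_before_scale_down=datetime.timedelta(minutes=15),
+    )
+
+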
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: str,
+ client_secret: str,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ private_link_resource_id: Optional[str] = None,
+ group_id: Optional[str] = None,
+ request_message: Optional[str] = None,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = name
+ self.private_link_resource_id = private_link_resource_id
+ self.group_id = group_id
+ self.request_message = request_message
+ self.status = status
+
+
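+# Illustrative sketch (not part of the generated client): a shared private link
+# resource pointing at a dependent resource's private link; every value below is
+# a generic placeholder.
+def _example_shared_private_link():  # pragma: no cover - illustrative only
+    return SharedPrivateLinkResource(
+        name="<link-name>",
+        private_link_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/<provider>/<type>/<name>/privateLinkResources/<groupId>",
+        group_id="<groupId>",
+        request_message="<request message>",
+        status="Approved",
+    )
+
+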
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ tier: Optional[str] = None,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = name
+ self.tier = tier
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceSku"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class SkuSettings(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations in which the SKU is available. These will be supported
+ and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions because of which the SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ restrictions: Optional[List["Restriction"]] = None,
+ **kwargs
+ ):
+ super(SkuSettings, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = restrictions
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "SslConfigurationStatus"]] = None,
+ cert: Optional[str] = None,
+ key: Optional[str] = None,
+ cname: Optional[str] = None,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = status
+ self.cert = cert
+ self.key = key
+ self.cname = cname
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ limit: Optional[int] = None,
+ status: Optional[Union[str, "Status"]] = None,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = limit
+ self.unit = None
+ self.status = status
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ admin_user_name: str,
+ admin_user_ssh_public_key: Optional[str] = None,
+ admin_user_password: Optional[str] = None,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = admin_user_name
+ self.admin_user_ssh_public_key = admin_user_ssh_public_key
+ self.admin_user_password = admin_user_password
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ properties: Optional["VirtualMachineProperties"] = None,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = properties
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ virtual_machine_size: Optional[str] = None,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = virtual_machine_size
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = administrator_account
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ :param supported_compute_types: Specifies the compute types supported by the virtual machine
+ size.
+ :type supported_compute_types: list[str]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ estimated_vm_prices: Optional["EstimatedVmPrices"] = None,
+ supported_compute_types: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = estimated_vm_prices
+ self.supported_compute_types = supported_compute_types
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ aml_compute: Optional[List["VirtualMachineSize"]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.aml_compute = aml_compute
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ public_key_data: Optional[str] = None,
+ private_key_data: Optional[str] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = username
+ self.password = password
+ self.public_key_data = public_key_data
+ self.private_key_data = private_key_data
+
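+# Illustrative usage sketch (not generated code): one way the three virtual
+# machine models above might be combined when attaching an existing VM as a
+# compute target. The resource id is a placeholder, and the ComputeResource
+# wrapper (defined elsewhere) is what begin_create_or_update expects.
+#
+#     vm = VirtualMachine(
+#         resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
+#                     "Microsoft.Compute/virtualMachines/<vm-name>",
+#         properties=VirtualMachineProperties(
+#             ssh_port=22,
+#             administrator_account=VirtualMachineSshCredentials(
+#                 username="azureuser",
+#                 private_key_data="<ssh-private-key>",
+#             ),
+#         ),
+#     )
+#     # compute_resource = ComputeResource(properties=vm)
+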
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format.
+ :vartype creation_time: ~datetime.datetime
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by the
+ workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'creation_time': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ key_vault: Optional[str] = None,
+ application_insights: Optional[str] = None,
+ container_registry: Optional[str] = None,
+ storage_account: Optional[str] = None,
+ discovery_url: Optional[str] = None,
+ encryption: Optional["EncryptionProperty"] = None,
+ hbi_workspace: Optional[bool] = False,
+ image_build_compute: Optional[str] = None,
+ allow_public_access_when_behind_vnet: Optional[bool] = False,
+ shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.workspace_id = None
+ self.description = description
+ self.friendly_name = friendly_name
+ self.creation_time = None
+ self.key_vault = key_vault
+ self.application_insights = application_insights
+ self.container_registry = container_registry
+ self.storage_account = storage_account
+ self.discovery_url = discovery_url
+ self.provisioning_state = None
+ self.encryption = encryption
+ self.hbi_workspace = hbi_workspace
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = image_build_compute
+ self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = shared_private_link_resources
+ self.notebook_info = None
+
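+# Illustrative usage sketch (not generated code): the 'properties.*' keys in the
+# attribute map above mean that most constructor arguments are serialized under
+# the ARM "properties" envelope. All ids and names below are placeholders.
+#
+#     ws = Workspace(
+#         location="<region>",
+#         sku=Sku(name="Basic", tier="Basic"),
+#         friendly_name="<display-name>",
+#         key_vault="<key-vault-arm-id>",
+#         storage_account="<storage-account-arm-id>",
+#         application_insights="<app-insights-arm-id>",
+#         hbi_workspace=False,
+#     )
+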
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ category: Optional[str] = None,
+ target: Optional[str] = None,
+ auth_type: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = category
+ self.target = target
+ self.auth_type = auth_type
+ self.value = value
+
+
+class WorkspaceConnectionDto(msrest.serialization.Model):
+ """object used for creating workspace connection.
+
+ :param name: Friendly name of the workspace connection.
+ :type name: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ category: Optional[str] = None,
+ target: Optional[str] = None,
+ auth_type: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceConnectionDto, self).__init__(**kwargs)
+ self.name = name
+ self.category = category
+ self.target = target
+ self.auth_type = auth_type
+ self.value = value
+
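+# Illustrative usage sketch (not generated code): a minimal connection payload
+# built from the DTO above; category, auth_type and value are free-form strings
+# here, and all values shown are placeholders.
+#
+#     connection = WorkspaceConnectionDto(
+#         name="<connection-name>",
+#         category="<category>",
+#         target="<target-resource-uri>",
+#         auth_type="<auth-type>",
+#         value="<connection-details>",
+#     )
+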
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Workspace"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """AML workspace sku information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar skus: The list of workspace sku settings.
+ :vartype skus: list[~azure_machine_learning_workspaces.models.SkuSettings]
+ """
+
+ _validation = {
+ 'resource_type': {'readonly': True},
+ 'skus': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'skus': {'key': 'skus', 'type': '[SkuSettings]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.resource_type = None
+ self.skus = None
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = tags
+ self.sku = sku
+ self.description = description
+ self.friendly_name = friendly_name
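+
+
+# Illustrative usage sketch (not generated code): building an update payload from
+# the model above. The 'workspaces.update' call in the last comment line assumes
+# the WorkspacesOperations group exposed by the client and is not defined here.
+#
+#     params = WorkspaceUpdateParameters(
+#         tags={"env": "test"},
+#         sku=Sku(name="Enterprise", tier="Enterprise"),
+#         description="<updated description>",
+#     )
+#     # client.workspaces.update("<resource-group>", "<workspace-name>", params)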
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
new file mode 100644
index 00000000000..516999b100d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._notebooks_operations import NotebooksOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'WorkspaceFeaturesOperations',
+ 'NotebooksOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'WorkspaceConnectionsOperations',
+ 'MachineLearningComputeOperations',
+ 'AzureMachineLearningWorkspacesOperationsMixin',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py
new file mode 100644
index 00000000000..cca7b84e78d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py
@@ -0,0 +1,94 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class AzureMachineLearningWorkspacesOperationsMixin(object):
+
+ def list_skus(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.SkuListResult"]
+ """Lists all skus with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_skus.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
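+
+
+# Illustrative usage sketch (not generated code): list_skus returns an ItemPaged
+# that follows SkuListResult.next_link transparently, so callers just iterate.
+# The 'client' object is a hypothetical service client exposing this mixin.
+#
+#     for workspace_sku in client.list_skus():
+#         print(workspace_sku.resource_type,
+#               [s.name for s in (workspace_sku.skus or [])])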
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..6d20047572a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,931 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations(object):
+ """MachineLearningComputeOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ skiptoken=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedComputeResourcesList"]
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
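+
+ # Illustrative usage sketch (not generated code): begin_create_or_update returns
+ # an LROPoller; callers typically block on .result(), or pass polling=False to
+ # skip polling. The 'client' object and 'compute_resource' payload are
+ # hypothetical.
+ #
+ #     poller = client.machine_learning_compute.begin_create_or_update(
+ #         "<resource-group>", "<workspace-name>", "<compute-name>", compute_resource)
+ #     compute = poller.result()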
+
+ def _update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
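+
+ # Illustrative sketch (not generated code): driving begin_update with a
+ # ClusterUpdateParameters payload. The model/attribute names used below
+ # (ScaleSettings, scale_settings) are assumptions based on the models module
+ # and may need adjusting.
+ #
+ #   from azure_machine_learning_workspaces import models
+ #   update = models.ClusterUpdateParameters(
+ #       scale_settings=models.ScaleSettings(min_node_count=0, max_node_count=4))
+ #   poller = client.machine_learning_compute.begin_update(
+ #       "my-rg", "my-ws", "my-compute", update)
+ #   compute = poller.result()  # ComputeResource once the LRO completes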
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation'] = self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location'] = self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+ underlying compute from workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
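+
+ # Illustrative sketch (not generated code): begin_delete requires the
+ # underlyingResourceAction query value shown above. "Detach" removes the
+ # compute from the workspace but keeps the underlying Azure resource;
+ # "Delete" removes both. Client/attribute names are assumptions.
+ #
+ #   poller = client.machine_learning_compute.begin_delete(
+ #       "my-rg", "my-ws", "my-compute",
+ #       underlying_resource_action="Detach")
+ #   poller.wait()  # returns None when the operation finishes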
+
+ def list_nodes(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.AmlComputeNodesInformation"]
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
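+
+ # Illustrative sketch (not generated code): list_nodes returns an ItemPaged
+ # that transparently follows next_link, yielding one entry per compute node.
+ # The per-node attribute names are assumptions based on AmlComputeNodeInformation.
+ #
+ #   for node in client.machine_learning_compute.list_nodes("my-rg", "my-ws", "my-compute"):
+ #       print(node.node_id, node.public_ip_address, node.port, node.node_state)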
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeSecrets"
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
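+
+ # Illustrative sketch (not generated code): list_keys issues a POST and
+ # deserializes the polymorphic ComputeSecrets model; the concrete subtype
+ # depends on the compute type. Client/attribute names are assumptions.
+ #
+ #   secrets = client.machine_learning_compute.list_keys("my-rg", "my-ws", "my-compute")
+ #   print(type(secrets).__name__)  # e.g. a compute-type-specific secrets model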
+
+ def start(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.start.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def stop(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.stop.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def restart(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
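+
+ # Illustrative sketch (not generated code): start, stop and restart are plain
+ # synchronous POSTs against a compute instance; each returns None on a 200
+ # response. Client/attribute names are assumptions.
+ #
+ #   ops = client.machine_learning_compute
+ #   ops.stop("my-rg", "my-ws", "my-instance")
+ #   ops.start("my-rg", "my-ws", "my-instance")
+ #   ops.restart("my-rg", "my-ws", "my-instance")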
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..9e541bca756
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
@@ -0,0 +1,166 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations(object):
+ """NotebooksOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def _prepare_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.NotebookResourceInfo"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ def begin_prepare(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.NotebookResourceInfo"]
+ """prepare.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._prepare_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
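+
+ # Illustrative sketch (not generated code): begin_prepare polls with
+ # final-state-via=location as configured above and resolves to a
+ # NotebookResourceInfo. Client/attribute names are assumptions.
+ #
+ #   poller = client.notebooks.begin_prepare("my-rg", "my-ws")
+ #   info = poller.result()
+ #   print(info.fqdn)  # assumed NotebookResourceInfo attribute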
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
new file mode 100644
index 00000000000..9b568ce1c77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+ """Operations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.OperationListResult"]
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
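+
+ # Illustrative sketch (not generated code): extract_data above always returns
+ # next_link=None, so the operations list is effectively a single page and
+ # iterating the ItemPaged yields every available REST operation once.
+ # Client/attribute names are assumptions.
+ #
+ #   for op in client.operations.list():
+ #       print(op.name)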
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..642ba9195ff
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,304 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations(object):
+ """PrivateEndpointConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def put(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ properties, # type: "models.PrivateEndpointConnection"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.put.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
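+
+ # Illustrative sketch (not generated code): get/put operate on a single private
+ # endpoint connection; put sends the full PrivateEndpointConnection body, e.g.
+ # to approve a pending connection. Model/attribute names are assumptions.
+ #
+ #   pec = client.private_endpoint_connections.get("my-rg", "my-ws", "my-pec")
+ #   pec.private_link_service_connection_state.status = "Approved"
+ #   updated = client.private_endpoint_connections.put("my-rg", "my-ws", "my-pec", pec)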
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..9e2314373fc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations(object):
+ """PrivateLinkResourcesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateLinkResourceListResult"
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
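+
+ # Illustrative sketch (not generated code): list_by_workspace is a single GET
+ # returning PrivateLinkResourceListResult; its .value holds the private link
+ # resources (group id, required members). Attribute names are assumptions.
+ #
+ #   result = client.private_link_resources.list_by_workspace("my-rg", "my-ws")
+ #   for res in result.value or []:
+ #       print(res.group_id, res.required_members)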
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
new file mode 100644
index 00000000000..025247e1fad
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
@@ -0,0 +1,181 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations(object):
+ """QuotasOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def update(
+ self,
+ location, # type: str
+ parameters, # type: "models.QuotaUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.UpdateWorkspaceQuotasResult"
+ """Update quota for each VM family in workspace.
+
+ :param location: The location for which the quota update is queried.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListWorkspaceQuotas"]
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore
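A minimal sketch of how these two quota operations might be called; the client entry point and the model field names (`QuotaBaseProperties`, `limit`, `unit`, and so on) are assumptions, not confirmed by this diff:
```
# Hypothetical usage sketch -- client and model field names are assumptions.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
    models,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# list() returns an ItemPaged that transparently follows next_link.
for quota in client.quotas.list("eastus2euap"):
    print(quota.unit, quota.limit)

# update() POSTs QuotaUpdateParameters and returns UpdateWorkspaceQuotasResult.
parameters = models.QuotaUpdateParameters(
    value=[
        models.QuotaBaseProperties(
            id="<quota-resource-id>",
            type="Microsoft.MachineLearningServices/workspaces/quotas",
            limit=100,
            unit="Count",
        )
    ]
)
result = client.quotas.update("eastus2euap", parameters)
```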
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
new file mode 100644
index 00000000000..fe87b53386f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+ """UsagesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListUsagesResult"]
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
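A minimal sketch of paging through usage information; the client entry point and the `Usage` field names are assumptions, not confirmed by this diff:
```
# Hypothetical usage sketch -- client and Usage field names are assumptions.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# list() returns an ItemPaged of usage records for the given location.
for usage in client.usages.list("eastus2euap"):
    print(usage.current_value, usage.limit)
```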
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..64960e85c76
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations(object):
+ """VirtualMachineSizesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ compute_type=None, # type: Optional[str]
+ recommended=None, # type: Optional[bool]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.VirtualMachineSizeListResult"
+ """Returns supported VM Sizes in a location.
+
+ :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :param compute_type: Type of compute to filter by.
+ :type compute_type: str
+ :param recommended: Specifies whether to return recommended vm sizes or all vm sizes.
+ :type recommended: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if compute_type is not None:
+ query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str')
+ if recommended is not None:
+ query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
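A minimal sketch of querying VM sizes with the optional filters; the client entry point and the result field names are assumptions, not confirmed by this diff:
```
# Hypothetical usage sketch -- client and result field names are assumptions.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# compute_type and recommended map to the compute-type / recommended query parameters.
result = client.virtual_machine_sizes.list(
    "eastus2euap", compute_type="AmlCompute", recommended=True
)
for size in result.value or []:
    print(size.name, size.family)
```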
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..3489ddae6eb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations(object):
+ """WorkspaceConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ target=None, # type: Optional[str]
+ category=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"]
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ def create(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ parameters, # type: "models.WorkspaceConnectionDto"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Add a new workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
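A minimal end-to-end sketch of the four connection operations; the client entry point and the `WorkspaceConnectionDto` field names are assumptions, not confirmed by this diff:
```
# Hypothetical usage sketch -- client and DTO field names are assumptions.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
    models,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
rg, ws = "workspace-1234", "testworkspace"

# create() PUTs a WorkspaceConnectionDto under the given friendly name.
dto = models.WorkspaceConnectionDto(
    name="connection-1",
    category="ACR",
    target="<target-resource-id>",
    auth_type="PAT",
    value="<secret-value>",
)
client.workspace_connections.create(rg, ws, "connection-1", dto)

# list() optionally filters on the target and category query parameters.
for connection in client.workspace_connections.list(rg, ws, category="ACR"):
    print(connection.name)

# get() and delete() address a single connection by its friendly name.
client.workspace_connections.get(rg, ws, "connection-1")
client.workspace_connections.delete(rg, ws, "connection-1")
```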
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..f28d14b7bbd
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations(object):
+ """WorkspaceFeaturesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListAmlUserFeatureResult"]
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
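A minimal sketch of listing enabled workspace features; the client entry point and the feature field names are assumptions, not confirmed by this diff:
```
# Hypothetical usage sketch -- client and feature field names are assumptions.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# list() returns an ItemPaged of feature records for the workspace.
for feature in client.workspace_features.list("myResourceGroup", "testworkspace"):
    print(feature.id, feature.display_name)
```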
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..1df9daf3a21
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
@@ -0,0 +1,688 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations(object):
+ """WorkspacesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.Workspace"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.Workspace"]
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.WorkspaceUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
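+
+    # Usage sketch (illustrative, not part of the generated code): updating mutable
+    # workspace fields. `client` is assumed to be an authenticated
+    # AzureMachineLearningWorkspaces client; the WorkspaceUpdateParameters field names
+    # below are assumptions based on that model.
+    #
+    #     params = models.WorkspaceUpdateParameters(
+    #         description="new description",
+    #         friendly_name="New friendly name",
+    #     )
+    #     workspace = client.workspaces.update("workspace-1234", "testworkspace", params)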
+
+ def list_by_resource_group(
+ self,
+ resource_group_name, # type: str
+ skiptoken=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
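+
+    # Usage sketch (illustrative, not part of the generated code): the return value is an
+    # ItemPaged iterator, so callers can loop over it directly and paging (including the
+    # $skiptoken continuation) is handled for them. `client` is assumed to be an
+    # authenticated AzureMachineLearningWorkspaces client.
+    #
+    #     for workspace in client.workspaces.list_by_resource_group("workspace-1234"):
+    #         print(workspace.name)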
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListWorkspaceKeysResult"
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
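+
+    # Usage sketch (illustrative, not part of the generated code): fetching the workspace
+    # keys. `client` is assumed to be an authenticated AzureMachineLearningWorkspaces
+    # client; only the raw ListWorkspaceKeysResult object is used here, since its
+    # individual attributes are not shown in this file.
+    #
+    #     keys = client.workspaces.list_keys("testrg123", "workspaces123")
+    #     print(keys)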
+
+ def resync_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.resync_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def list_by_subscription(
+ self,
+ skiptoken=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skiptoken: Continuation token for pagination.
+ :type skiptoken: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skiptoken is not None:
+ query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md
new file mode 100644
index 00000000000..054358d8c23
--- /dev/null
+++ b/src/machinelearningservices/report.md
@@ -0,0 +1,969 @@
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az machinelearningservices|[groups](#CommandGroups)|
+
+## GROUPS
+### Command groups in `az machinelearningservices` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az machinelearningservices workspace|Workspaces|[commands](#CommandsInWorkspaces)|
+|az machinelearningservices workspace-feature|WorkspaceFeatures|[commands](#CommandsInWorkspaceFeatures)|
+|az machinelearningservices notebook|Notebooks|[commands](#CommandsInNotebooks)|
+|az machinelearningservices usage|Usages|[commands](#CommandsInUsages)|
+|az machinelearningservices virtual-machine-size|VirtualMachineSizes|[commands](#CommandsInVirtualMachineSizes)|
+|az machinelearningservices quota|Quotas|[commands](#CommandsInQuotas)|
+|az machinelearningservices workspace-connection|WorkspaceConnections|[commands](#CommandsInWorkspaceConnections)|
+|az machinelearningservices machine-learning-compute|MachineLearningCompute|[commands](#CommandsInMachineLearningCompute)|
+|az machinelearningservices||[commands](#CommandsIn)|
+|az machinelearningservices private-endpoint-connection|PrivateEndpointConnections|[commands](#CommandsInPrivateEndpointConnections)|
+|az machinelearningservices private-link-resource|PrivateLinkResources|[commands](#CommandsInPrivateLinkResources)|
+
+## COMMANDS
+### Commands in `az machinelearningservices` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices list-sku](#ListSkus)|ListSkus|[Parameters](#ParametersListSkus)|[Example](#ExamplesListSkus)|
+
+### Commands in `az machinelearningservices machine-learning-compute` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices machine-learning-compute list](#MachineLearningComputeListByWorkspace)|ListByWorkspace|[Parameters](#ParametersMachineLearningComputeListByWorkspace)|[Example](#ExamplesMachineLearningComputeListByWorkspace)|
+|[az machinelearningservices machine-learning-compute show](#MachineLearningComputeGet)|Get|[Parameters](#ParametersMachineLearningComputeGet)|[Example](#ExamplesMachineLearningComputeGet)|
+|[az machinelearningservices machine-learning-compute aks create](#MachineLearningComputeCreateOrUpdate#Create#AKS)|CreateOrUpdate#Create#AKS|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AKS)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AKS)|
+|[az machinelearningservices machine-learning-compute aml-compute create](#MachineLearningComputeCreateOrUpdate#Create#AmlCompute)|CreateOrUpdate#Create#AmlCompute|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|
+|[az machinelearningservices machine-learning-compute compute-instance create](#MachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|CreateOrUpdate#Create#ComputeInstance|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|
+|[az machinelearningservices machine-learning-compute data-factory create](#MachineLearningComputeCreateOrUpdate#Create#DataFactory)|CreateOrUpdate#Create#DataFactory|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataFactory)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataFactory)|
+|[az machinelearningservices machine-learning-compute data-lake-analytics create](#MachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|CreateOrUpdate#Create#DataLakeAnalytics|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|
+|[az machinelearningservices machine-learning-compute databricks create](#MachineLearningComputeCreateOrUpdate#Create#Databricks)|CreateOrUpdate#Create#Databricks|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#Databricks)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#Databricks)|
+|[az machinelearningservices machine-learning-compute hd-insight create](#MachineLearningComputeCreateOrUpdate#Create#HDInsight)|CreateOrUpdate#Create#HDInsight|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#HDInsight)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#HDInsight)|
+|[az machinelearningservices machine-learning-compute virtual-machine create](#MachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|CreateOrUpdate#Create#VirtualMachine|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|
+|[az machinelearningservices machine-learning-compute update](#MachineLearningComputeUpdate)|Update|[Parameters](#ParametersMachineLearningComputeUpdate)|[Example](#ExamplesMachineLearningComputeUpdate)|
+|[az machinelearningservices machine-learning-compute delete](#MachineLearningComputeDelete)|Delete|[Parameters](#ParametersMachineLearningComputeDelete)|[Example](#ExamplesMachineLearningComputeDelete)|
+|[az machinelearningservices machine-learning-compute list-key](#MachineLearningComputeListKeys)|ListKeys|[Parameters](#ParametersMachineLearningComputeListKeys)|[Example](#ExamplesMachineLearningComputeListKeys)|
+|[az machinelearningservices machine-learning-compute list-node](#MachineLearningComputeListNodes)|ListNodes|[Parameters](#ParametersMachineLearningComputeListNodes)|[Example](#ExamplesMachineLearningComputeListNodes)|
+|[az machinelearningservices machine-learning-compute restart](#MachineLearningComputeRestart)|Restart|[Parameters](#ParametersMachineLearningComputeRestart)|[Example](#ExamplesMachineLearningComputeRestart)|
+|[az machinelearningservices machine-learning-compute start](#MachineLearningComputeStart)|Start|[Parameters](#ParametersMachineLearningComputeStart)|[Example](#ExamplesMachineLearningComputeStart)|
+|[az machinelearningservices machine-learning-compute stop](#MachineLearningComputeStop)|Stop|[Parameters](#ParametersMachineLearningComputeStop)|[Example](#ExamplesMachineLearningComputeStop)|
+
+### Commands in `az machinelearningservices notebook` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices notebook prepare](#NotebooksPrepare)|Prepare|[Parameters](#ParametersNotebooksPrepare)|[Example](#ExamplesNotebooksPrepare)|
+
+### Commands in `az machinelearningservices private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-endpoint-connection show](#PrivateEndpointConnectionsGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionsGet)|[Example](#ExamplesPrivateEndpointConnectionsGet)|
+|[az machinelearningservices private-endpoint-connection delete](#PrivateEndpointConnectionsDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionsDelete)|[Example](#ExamplesPrivateEndpointConnectionsDelete)|
+|[az machinelearningservices private-endpoint-connection put](#PrivateEndpointConnectionsPut)|Put|[Parameters](#ParametersPrivateEndpointConnectionsPut)|[Example](#ExamplesPrivateEndpointConnectionsPut)|
+
+### Commands in `az machinelearningservices private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-link-resource list](#PrivateLinkResourcesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersPrivateLinkResourcesListByWorkspace)|[Example](#ExamplesPrivateLinkResourcesListByWorkspace)|
+
+### Commands in `az machinelearningservices quota` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices quota list](#QuotasList)|List|[Parameters](#ParametersQuotasList)|[Example](#ExamplesQuotasList)|
+|[az machinelearningservices quota update](#QuotasUpdate)|Update|[Parameters](#ParametersQuotasUpdate)|[Example](#ExamplesQuotasUpdate)|
+
+### Commands in `az machinelearningservices usage` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)|
+
+### Commands in `az machinelearningservices virtual-machine-size` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices virtual-machine-size list](#VirtualMachineSizesList)|List|[Parameters](#ParametersVirtualMachineSizesList)|[Example](#ExamplesVirtualMachineSizesList)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)|
+|[az machinelearningservices workspace list](#WorkspacesListBySubscription)|ListBySubscription|[Parameters](#ParametersWorkspacesListBySubscription)|[Example](#ExamplesWorkspacesListBySubscription)|
+|[az machinelearningservices workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)|
+|[az machinelearningservices workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)|
+|[az machinelearningservices workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)|
+|[az machinelearningservices workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)|
+|[az machinelearningservices workspace list-key](#WorkspacesListKeys)|ListKeys|[Parameters](#ParametersWorkspacesListKeys)|[Example](#ExamplesWorkspacesListKeys)|
+|[az machinelearningservices workspace resync-key](#WorkspacesResyncKeys)|ResyncKeys|[Parameters](#ParametersWorkspacesResyncKeys)|[Example](#ExamplesWorkspacesResyncKeys)|
+
+### Commands in `az machinelearningservices workspace-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-connection list](#WorkspaceConnectionsList)|List|[Parameters](#ParametersWorkspaceConnectionsList)|[Example](#ExamplesWorkspaceConnectionsList)|
+|[az machinelearningservices workspace-connection show](#WorkspaceConnectionsGet)|Get|[Parameters](#ParametersWorkspaceConnectionsGet)|[Example](#ExamplesWorkspaceConnectionsGet)|
+|[az machinelearningservices workspace-connection create](#WorkspaceConnectionsCreate)|Create|[Parameters](#ParametersWorkspaceConnectionsCreate)|[Example](#ExamplesWorkspaceConnectionsCreate)|
+|[az machinelearningservices workspace-connection delete](#WorkspaceConnectionsDelete)|Delete|[Parameters](#ParametersWorkspaceConnectionsDelete)|[Example](#ExamplesWorkspaceConnectionsDelete)|
+
+### Commands in `az machinelearningservices workspace-feature` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-feature list](#WorkspaceFeaturesList)|List|[Parameters](#ParametersWorkspaceFeaturesList)|[Example](#ExamplesWorkspaceFeaturesList)|
+
+
+## COMMAND DETAILS
+
+### group `az machinelearningservices`
+#### Command `az machinelearningservices list-sku`
+
+##### Example
+```
+az machinelearningservices list-sku
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+### group `az machinelearningservices machine-learning-compute`
+#### Command `az machinelearningservices machine-learning-compute list`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
+
+#### Command `az machinelearningservices machine-learning-compute show`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute aks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --ak-s-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\
+\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\
+\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\
+\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--ak-s-compute-location**|string|Location for the underlying compute|ak_s_compute_location|computeLocation|
+|**--ak-s-description**|string|The description of the Machine Learning compute.|ak_s_description|description|
+|**--ak-s-resource-id**|string|ARM resource id of the underlying compute|ak_s_resource_id|resourceId|
+|**--ak-s-properties**|object|AKS properties|ak_s_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute aml-compute create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --aml-compute-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNod\
+eCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSi\
+ze\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Di\
+sabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|aml_compute_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|aml_compute_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|aml_compute_resource_id|resourceId|
+|**--aml-compute-properties**|object|AML Compute properties|aml_compute_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute compute-instance create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --compute-instance-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"m\
+axNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\\
+"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicA\
+ccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|compute_instance_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|compute_instance_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|compute_instance_resource_id|resourceId|
+|**--compute-instance-properties**|object|Compute Instance properties|compute_instance_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute data-factory create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_factory_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_factory_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_factory_resource_id|resourceId|
+
+#### Command `az machinelearningservices machine-learning-compute data-lake-analytics create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_lake_analytics_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_lake_analytics_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_lake_analytics_resource_id|resourceId|
+|**--data-lake-store-account-name**|string|DataLake Store Account Name|data_lake_analytics_data_lake_store_account_name|dataLakeStoreAccountName|
+
+#### Command `az machinelearningservices machine-learning-compute databricks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|databricks_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|databricks_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|databricks_resource_id|resourceId|
+|**--databricks-access-token**|string|Databricks access token|databricks_databricks_access_token|databricksAccessToken|
+
+#### Command `az machinelearningservices machine-learning-compute hd-insight create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|hd_insight_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|hd_insight_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|hd_insight_resource_id|resourceId|
+|**--ssh-port**|integer|Port open for ssh connections on the master node of the cluster.|hd_insight_ssh_port|sshPort|
+|**--address**|string|Public IP address of the master node of the cluster.|hd_insight_address|address|
+|**--administrator-account**|object|Admin credentials for master node of the cluster|hd_insight_administrator_account|administratorAccount|
+
+#### Command `az machinelearningservices machine-learning-compute virtual-machine create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --type \
+"SystemAssigned,UserAssigned" --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resou\
+rceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|virtual_machine_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|virtual_machine_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|virtual_machine_resource_id|resourceId|
+|**--virtual-machine-size**|string|Virtual Machine size|virtual_machine_virtual_machine_size|virtualMachineSize|
+|**--ssh-port**|integer|Port open for ssh connections.|virtual_machine_ssh_port|sshPort|
+|**--address**|string|Public IP address of the virtual machine.|virtual_machine_address|address|
+|**--administrator-account**|object|Admin credentials for virtual machine|virtual_machine_administrator_account|administratorAccount|
+
+#### Command `az machinelearningservices machine-learning-compute update`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--scale-settings**|object|Desired scale settings for the amlCompute.|scale_settings|scaleSettings|
+
+#### Command `az machinelearningservices machine-learning-compute delete`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction|
+
+#### Command `az machinelearningservices machine-learning-compute list-key`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute list-node`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute restart`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute start`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute stop`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+### group `az machinelearningservices notebook`
+#### Command `az machinelearningservices notebook prepare`
+
+##### Example
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices private-endpoint-connection`
+#### Command `az machinelearningservices private-endpoint-connection show`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" --resource-group \
+"rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection delete`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection put`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
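+
+Rejecting a pending connection follows the same shape; the status value "Rejected" below is an assumed allowed state
+(only "Approved" appears in the example above), and the description is a placeholder:
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Rejected by admin" status="Rejected" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```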
+
+### group `az machinelearningservices private-link-resource`
+#### Command `az machinelearningservices private-link-resource list`
+
+##### Example
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices quota`
+#### Command `az machinelearningservices quota list`
+
+##### Example
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+#### Command `az machinelearningservices quota update`
+
+##### Example
+```
+az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/\
+dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Ma\
+chineLearningServices/workspaces/demo_workspace1/quotas/StandardDSv2Family" limit=100 unit="Count" --value \
+type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-00\
+0000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/StandardDSv2\
+Family" limit=200 unit="Count"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota is updated.|location|location|
+|**--value**|array|The list of quota updates.|value|value|
+
+### group `az machinelearningservices usage`
+#### Command `az machinelearningservices usage list`
+
+##### Example
+```
+az machinelearningservices usage list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### group `az machinelearningservices virtual-machine-size`
+#### Command `az machinelearningservices virtual-machine-size list`
+
+##### Example
+```
+az machinelearningservices virtual-machine-size list --location "eastus" --recommended false
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which virtual machine sizes are queried.|location|location|
+|**--compute-type**|string|Type of compute to filter by.|compute_type|compute-type|
+|**--recommended**|boolean|Specifies whether to return recommended VM sizes or all VM sizes.|recommended|recommended|
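+
+The `--compute-type` filter can be combined with the flags above; the value "AmlCompute" in this sketch is an
+assumption about a valid compute type, not taken from the example:
+```
+az machinelearningservices virtual-machine-size list --location "eastus" --compute-type "AmlCompute" \
+--recommended true
+```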
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken|
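+
+When a listing is paged, `--skiptoken` carries the continuation token returned by the previous call; the token below
+is a placeholder:
+```
+az machinelearningservices workspace list --resource-group "workspace-1234" --skiptoken "{continuationToken}"
+```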
+
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+
+#### Command `az machinelearningservices workspace show`
+
+##### Example
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace create`
+
+##### Example
+```
+az machinelearningservices workspace create --type "SystemAssigned" --location "eastus2euap" --description "test \
+description" --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/\
+providers/microsoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-4444\
+44444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee1122\
+33445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234\
+/providers/Microsoft.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+--key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyV\
+ault/vaults/testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/0000\
+0000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbre\
+source/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --sku name="Basic" tier="Basic" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable.|friendly_name|friendlyName|
+|**--key-vault**|string|ARM ID of the key vault associated with this workspace. This cannot be changed once the workspace has been created.|key_vault|keyVault|
+|**--application-insights**|string|ARM ID of the Application Insights resource associated with this workspace. This cannot be changed once the workspace has been created.|application_insights|applicationInsights|
+|**--container-registry**|string|ARM ID of the container registry associated with this workspace. This cannot be changed once the workspace has been created.|container_registry|containerRegistry|
+|**--storage-account**|string|ARM ID of the storage account associated with this workspace. This cannot be changed once the workspace has been created.|storage_account|storageAccount|
+|**--discovery-url**|string|URL for the discovery service to identify regional endpoints for machine learning experimentation services.|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--status**|choice|Indicates whether or not the encryption is enabled for the workspace.|status|status|
+|**--key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+
+#### Command `az machinelearningservices workspace update`
+
+##### Example
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" --sku \
+name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName|
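+
+The update command also accepts `--tags`; a sketch with placeholder tag keys and values:
+```
+az machinelearningservices workspace update --tags environment="dev" owner="ml-team" \
+--resource-group "workspace-1234" --name "testworkspace"
+```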
+
+#### Command `az machinelearningservices workspace delete`
+
+##### Example
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-key`
+
+##### Example
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace resync-key`
+
+##### Example
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-connection`
+#### Command `az machinelearningservices workspace-connection list`
+
+##### Example
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target \
+"www.facebook.com" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--category**|string|Category of the workspace connection.|category|category|
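+
+Both `--target` and `--category` appear to be optional filters; assuming so, listing every connection in the
+workspace would look like:
+```
+az machinelearningservices workspace-connection list --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```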
+
+#### Command `az machinelearningservices workspace-connection show`
+
+##### Example
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+#### Command `az machinelearningservices workspace-connection create`
+
+##### Example
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+--auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+|**--name**|string|Friendly name of the workspace connection|name|name|
+|**--category**|string|Category of the workspace connection.|category|category|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType|
+|**--value**|string|Value details of the workspace connection.|value|value|
+
+#### Command `az machinelearningservices workspace-connection delete`
+
+##### Example
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+### group `az machinelearningservices workspace-feature`
+#### Command `az machinelearningservices workspace-feature list`
+
+##### Example
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg
new file mode 100644
index 00000000000..2fdd96e5d39
--- /dev/null
+++ b/src/machinelearningservices/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py
new file mode 100644
index 00000000000..e4ec7166802
--- /dev/null
+++ b/src/machinelearningservices/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open
+from setuptools import setup, find_packages
+
+# Version number; keep in sync with the latest HISTORY.rst entry.
+VERSION = '0.1.0'
+try:
+ from azext_machinelearningservices.manual.version import VERSION
+except ImportError:
+ pass
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+
+try:
+ from azext_machinelearningservices.manual.dependency import DEPENDENCIES
+except ImportError:
+ pass
+
+with open('README.md', 'r', encoding='utf-8') as f:
+ README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+ HISTORY = f.read()
+
+setup(
+ name='machinelearningservices',
+ version=VERSION,
+ description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+ author='Microsoft Corporation',
+ author_email='azpycli@microsoft.com',
+ url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+ long_description=README + '\n\n' + HISTORY,
+ license='MIT',
+ classifiers=CLASSIFIERS,
+ packages=find_packages(),
+ install_requires=DEPENDENCIES,
+ package_data={'azext_machinelearningservices': ['azext_metadata.json']},
+)