diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst
new file mode 100644
index 00000000000..1c139576ba0
--- /dev/null
+++ b/src/machinelearningservices/HISTORY.rst
@@ -0,0 +1,8 @@
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md
new file mode 100644
index 00000000000..aa816f86a5a
--- /dev/null
+++ b/src/machinelearningservices/README.md
@@ -0,0 +1,730 @@
+# Azure CLI machinelearningservices Extension #
+This is the Azure CLI extension for machinelearningservices.
+
+### How to use ###
+Install this extension using the below CLI command
+```
+az extension add --name machinelearningservices
+```
+
+### Included Features ###
+#### machinelearningservices workspace ####
+##### Create #####
+```
+az machinelearningservices workspace create \
+ --identity type="SystemAssigned,UserAssigned" userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai":{}} \
+ --location "eastus2euap" --description "test description" \
+ --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/microsoft.insights/components/testinsights" \
+ --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+ --identity user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai" \
+ --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+ --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" \
+ --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" \
+ --resource-group "workspace-1234" --name "testworkspace"
+
+az machinelearningservices workspace wait --created --resource-group "{rg}" --name "{myWorkspace}"
+```
+##### Show #####
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Update #####
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+ --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List-key #####
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### List-notebook-access-token #####
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+ --name "testworkspace"
+```
+##### List-notebook-key #####
+```
+az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name "workspaces123"
+```
+##### List-storage-account-key #####
+```
+az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Prepare-notebook #####
+```
+az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name "workspaces123"
+```
+##### Resync-key #####
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+#### machinelearningservices usage ####
+##### List #####
+```
+az machinelearningservices usage list --location "eastus"
+```
+#### machinelearningservices virtual-machine-size ####
+##### List #####
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+#### machinelearningservices quota ####
+##### List #####
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Update #####
+```
+az machinelearningservices quota update --location "eastus" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+```
+#### machinelearningservices compute ####
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"schedules\\":{\\"computeStartStop\\":[{\\"action\\":\\"Stop\\",\\"cron\\":{\\"expression\\":\\"0 18 * * *\\",\\"startTime\\":\\"2021-04-23T01:30:00\\",\\"timeZone\\":\\"Pacific Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Cron\\"}]},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Update #####
+```
+az machinelearningservices compute update --name "compute123" \
+ --scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List-key #####
+```
+az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List-node #####
+```
+az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Restart #####
+```
+az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Start #####
+```
+az machinelearningservices compute start --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Stop #####
+```
+az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Update-schedule #####
+```
+az machinelearningservices compute update-schedule --name "compute123" \
+ --compute-start-stop "[{\\"action\\":\\"Start\\",\\"recurrence\\":{\\"frequency\\":\\"Day\\",\\"interval\\":1,\\"schedule\\":{\\"hours\\":[18],\\"minutes\\":[30],\\"weekDays\\":null},\\"startTime\\":\\"2021-04-23T01:30:00\\",\\"timeZone\\":\\"Pacific Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Recurrence\\"}]" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+ --underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+#### machinelearningservices private-endpoint-connection ####
+##### Create #####
+```
+az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+ --private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices private-link-resource ####
+##### List #####
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices workspace-connection ####
+##### Create #####
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type "PAT" \
+ --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+ --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" \
+ --target "www.facebook.com" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+#### machinelearningservices batch-endpoint ####
+##### Create #####
+```
+az machinelearningservices batch-endpoint create --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","secondaryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} traffic={"myDeployment1":0,"myDeployment2":1} \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices batch-endpoint show --endpoint-name "testBatchEndpoint" \
+ --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices batch-endpoint list --count 1 --resource-group "resourceGroup-1234" \
+ --workspace-name "testworkspace"
+```
+##### Update #####
+```
+az machinelearningservices batch-endpoint update \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --endpoint-name "testBatchEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### List-key #####
+```
+az machinelearningservices batch-endpoint list-key --endpoint-name "testBatchEndpoint" \
+ --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices batch-endpoint delete --endpoint-name "testBatchEndpoint" \
+ --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices batch-deployment ####
+##### Create #####
+```
+az machinelearningservices batch-deployment create --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties description="string" codeConfiguration={"codeId":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/testcode/versions/1","scoringScript":"score.py"} compute={"instanceCount":0,"instanceType":"string","isLocal":false,"location":"string","properties":{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"},"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/testcompute"} environmentId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/myenv" environmentVariables={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} errorThreshold=0 loggingLevel="Info" miniBatchSize=0 model={"assetId":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/testmodel/versions/1","referenceType":"Id"} outputConfiguration={"appendRowFileName":"string","outputAction":"SummaryOnly"} partitionKeys="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} retrySettings={"maxRetries":0,"timeout":"string"} \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testBatchDeployment" --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices batch-deployment show --deployment-name "testBatchDeployment" \
+ --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices batch-deployment list --endpoint-name "testBatchEndpoint" \
+ --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Update #####
+```
+az machinelearningservices batch-deployment update \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testBatchDeployment" --endpoint-name "testBatchEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices batch-deployment delete --deployment-name "testBatchDeployment" \
+ --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices code-container ####
+##### Create #####
+```
+az machinelearningservices code-container create --name "testContainer" \
+ --properties description="string" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices code-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices code-version ####
+##### Create #####
+```
+az machinelearningservices code-version create --name "testContainer" \
+ --properties path="path/to/file.py" description="string" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" isAnonymous=true properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices code-version list --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices data-container ####
+##### Create #####
+```
+az machinelearningservices data-container create --name "datacontainer123" \
+ --properties description="string" properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices data-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices data-version ####
+##### Create #####
+```
+az machinelearningservices data-version create --name "dataset123" \
+ --properties path="path/to/file.csv" description="string" datasetType="Simple" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" isAnonymous=true properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --version "1" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version "1" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" --version "1" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices datastore ####
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"contentsType":"AzureDataLakeGen1","credentials":{"authorityUrl":"string","clientId":"00000000-1111-2222-3333-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","secrets":{"clientSecret":"string","secretsType":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"storeName":"testStore"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"authorityUrl":"string","clientId":"00000000-1111-2222-3333-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","secrets":{"clientSecret":"string","secretsType":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"endpoint":"core.windows.net","protocol":"https"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"accountName":"string","containerName":"string","contentsType":"AzureFile","credentials":{"credentialsType":"AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"contentsType":"AzurePostgreSql","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string","secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","enableSSL":true,"endpoint":"string","portNumber":123,"serverName":"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"contentsType":"AzureSqlDatabase","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string","secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","endpoint":"string","portNumber":123,"serverName":"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices datastore create --name "testDatastore" \
+ --properties description="string" contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"credentialsType":"AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List-secret #####
+```
+az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices environment-container ####
+##### Create #####
+```
+az machinelearningservices environment-container create --name "testEnvironment" \
+ --properties description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices environment-container show --name "testEnvironment" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices environment-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices environment-container delete --name "testEnvironment" --resource-group "testrg123" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices environment-specification-version ####
+##### Create #####
+```
+az machinelearningservices environment-specification-version create --name "testEnvironment" \
+ --properties description="string" condaFile="channels:\\n- defaults\\ndependencies:\\n- python=3.7.7\\nname: my-env" docker={"dockerSpecificationType":"Build","dockerfile":"FROM myimage"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices environment-specification-version show --name "testEnvironment" \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices environment-specification-version list --name "testEnvironment" \
+ --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices environment-specification-version delete --name "testEnvironment" \
+ --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+#### machinelearningservices job ####
+##### Create #####
+```
+az machinelearningservices job create \
+ --properties "{\\"description\\":\\"string\\",\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/mycode/versions/1\\",\\"command\\":\\"python file.py test\\",\\"compute\\":{\\"instanceCount\\":1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"},\\"distribution\\":{\\"distributionType\\":\\"PyTorch\\",\\"processCount\\":2},\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/AzureML-Tutorial/versions/1\\",\\"environmentVariables\\":{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"experimentName\\":\\"myExperiment\\",\\"identity\\":{\\"identityType\\":\\"AMLToken\\"},\\"inputDataBindings\\":{\\"test\\":{\\"dataId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"jobType\\":\\"Command\\",\\"outputDataBindings\\":{\\"test\\":{\\"datastoreId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastore/mydatastore\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"timeout\\":\\"PT1M\\"}" \
+ --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Create #####
+```
+az machinelearningservices job create \
+ --properties "{\\"description\\":\\"string\\",\\"algorithm\\":\\"Grid\\",\\"compute\\":{\\"instanceCount\\":1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"},\\"identity\\":{\\"identityType\\":\\"AMLToken\\"},\\"jobType\\":\\"Sweep\\",\\"maxConcurrentTrials\\":1,\\"maxTotalTrials\\":1,\\"objective\\":{\\"goal\\":\\"Minimize\\",\\"primaryMetric\\":\\"string\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"searchSpace\\":{\\"name\\":{}},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"timeout\\":\\"PT1M\\",\\"trial\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/mycode/versions/1\\",\\"command\\":\\"python file.py\
+ test\\",\\"distribution\\":{\\"distributionType\\":\\"PyTorch\\",\\"processCount\\":2},\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/AzureML-Tutorial/versions/1\\",\\"environmentVariables\\":{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"inputDataBindings\\":{\\"test\\":{\\"dataId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"outputDataBindings\\":{\\"test\\":{\\"datastoreId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastore/mydatastore\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"timeout\\":\\"PT1M\\"}}" \
+ --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices job list --job-type "Command" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices job list --job-type "Sweep" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Cancel #####
+```
+az machinelearningservices job cancel --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices job delete --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+#### machinelearningservices labeling-job ####
+##### Create #####
+```
+az machinelearningservices labeling-job create \
+ --properties description="string" datasetConfiguration={"assetName":"myAsset","datasetVersion":"1","incrementalDatasetRefreshEnabled":true} jobInstructions={"uri":"link/to/instructions"} jobType="Labeling" labelCategories={"myCategory1":{"allowMultiSelect":true,"classes":{"myLabelClass1":{"displayName":"myLabelClass1","subclasses":{}},"myLabelClass2":{"displayName":"myLabelClass2","subclasses":{}}},"displayName":"myCategory1Title"},"myCategory2":{"allowMultiSelect":true,"classes":{"myLabelClass1":{"displayName":"myLabelClass1","subclasses":{}},"myLabelClass2":{"displayName":"myLabelClass2","subclasses":{}}},"displayName":"myCategory2Title"}} labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingComputeBinding":{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/myscoringcompute"},"mlAssistEnabled":true,"trainingComputeBinding":{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mytrainingcompute"}} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+ --id "testLabelingJob" --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+ --include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices labeling-job list --count "10" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Export-label #####
+```
+az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Pause #####
+```
+az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Resume #####
+```
+az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+ --workspace-name "testworkspace"
+```
+#### machinelearningservices model-container ####
+##### Create #####
+```
+az machinelearningservices model-container create --name "testContainer" \
+ --properties description="Model container description" tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices model-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices model-version ####
+##### Create #####
+```
+az machinelearningservices model-version create --name "testContainer" \
+ --properties path="path/in/datastore" description="Model version description" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg123/providers/Microsoft.MachineLearningServices/workspaces/workspace123/datastores/datastore123" flavors={"python_function":{"data":{"loader_module":"myLoaderModule"}}} properties={"prop1":"value1","prop2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+ --resource-group "testrg123" --version "1" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" --version "1" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" --version "999" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices online-endpoint ####
+##### Create #####
+```
+az machinelearningservices online-endpoint create --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","secondaryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} target="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/compute123" traffic={"myDeployment1":0,"myDeployment2":1} \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Update #####
+```
+az machinelearningservices online-endpoint update --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --traffic myDeployment1=0 myDeployment2=1 \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Get-token #####
+```
+az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### List-key #####
+```
+az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Regenerate-key #####
+```
+az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" \
+ --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+#### machinelearningservices online-deployment ####
+##### Create #####
+```
+az machinelearningservices online-deployment create --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfiguration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"string\\"},\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memoryInGBLimit\\":64},\\"endpointComputeType\\":\\"K8S\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/env123\\",\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/model123\\",\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurrentRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Create #####
+```
+az machinelearningservices online-deployment create --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" --location "string" \
+ --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfiguration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"string\\"},\\"endpointComputeType\\":\\"Managed\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/env123\\",\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/model123\\",\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurrentRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### List #####
+```
+az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Show #####
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Update #####
+```
+az machinelearningservices online-deployment update --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" \
+ --properties "{\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memoryInGBLimit\\":64},\\"endpointComputeType\\":\\"K8S\\",\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}" \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Update #####
+```
+az machinelearningservices online-deployment update --type "UserAssigned" \
+ --user-assigned-identities "{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" \
+ --kind "string" \
+ --properties "{\\"endpointComputeType\\":\\"Managed\\",\\"readinessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}" \
+ --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Get-log #####
+```
+az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 \
+ --deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" \
+ --workspace-name "workspace123"
+```
+##### Delete #####
+```
+az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+ --resource-group "testrg123" --workspace-name "workspace123"
+```
+#### machinelearningservices workspace-feature ####
+##### List #####
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+#### machinelearningservices workspace-sku ####
+##### List #####
+```
+az machinelearningservices workspace-sku list
+```
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py
new file mode 100644
index 00000000000..b234b2a3aa6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl
+ machinelearningservices_custom = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.custom#{}',
+ client_factory=cf_machinelearningservices_cl)
+ parent = super(AzureMachineLearningWorkspacesCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom)
+
+ def load_command_table(self, args):
+ from azext_machinelearningservices.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_machinelearningservices.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader
diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py
new file mode 100644
index 00000000000..d95d53bf711
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/action.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
new file mode 100644
index 00000000000..cfc30c747c7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
@@ -0,0 +1,4 @@
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
+}
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py
new file mode 100644
index 00000000000..dbe9d5f9742
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/custom.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.custom import * # noqa: F403
+try:
+ from .manual.custom import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
new file mode 100644
index 00000000000..5d949b11290
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
@@ -0,0 +1,116 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_machinelearningservices_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_machinelearningservices.vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces
+ return get_mgmt_service_client(cli_ctx,
+ AzureMachineLearningWorkspaces)
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspaces
+
+
+def cf_usage(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).usages
+
+
+def cf_virtual_machine_size(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).virtual_machine_sizes
+
+
+def cf_quota(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).quotas
+
+
+def cf_compute(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).compute
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connections
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_link_resources
+
+
+def cf_workspace_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_connections
+
+
+def cf_batch_endpoint(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).batch_endpoints
+
+
+def cf_batch_deployment(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).batch_deployments
+
+
+def cf_code_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).code_containers
+
+
+def cf_code_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).code_versions
+
+
+def cf_data_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).data_containers
+
+
+def cf_data_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).data_versions
+
+
+def cf_datastore(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).datastores
+
+
+def cf_environment_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).environment_containers
+
+
+def cf_environment_specification_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).environment_specification_versions
+
+
+def cf_job(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).jobs
+
+
+def cf_labeling_job(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).labeling_jobs
+
+
+def cf_model_container(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).model_containers
+
+
+def cf_model_version(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).model_versions
+
+
+def cf_online_endpoint(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).online_endpoints
+
+
+def cf_online_deployment(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).online_deployments
+
+
+def cf_workspace_feature(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_features
+
+
+def cf_workspace_sku(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_skus
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
new file mode 100644
index 00000000000..f1818e382fb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
@@ -0,0 +1,2038 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list'] = """
+ type: command
+ short-summary: "Lists all the available machine learning workspaces under the specified resource group. And Lists \
+all the available machine learning workspaces under the specified subscription."
+ examples:
+ - name: Get Workspaces by Resource Group
+ text: |-
+ az machinelearningservices workspace list --resource-group "workspace-1234"
+ - name: Get Workspaces by subscription
+ text: |-
+ az machinelearningservices workspace list
+"""
+
+helps['machinelearningservices workspace show'] = """
+ type: command
+ short-summary: "Gets the properties of the specified machine learning workspace."
+ examples:
+ - name: Get Workspace
+ text: |-
+ az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace create'] = """
+ type: command
+ short-summary: "Create a workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --shared-private-link-resources
+ short-summary: "The list of shared private link resources in this workspace."
+ long-summary: |
+ Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \
+status=XX
+
+ name: Unique name of the private link.
+ private-link-resource-id: The resource id that private link links to.
+ group-id: The private link resource group id.
+ request-message: Request message.
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+
+ Multiple actions can be specified by using more than one --shared-private-link-resources argument.
+ - name: --identity
+ short-summary: "The identity that will be used to access the key vault for encryption at rest."
+ long-summary: |
+ Usage: --identity user-assigned-identity=XX
+
+ user-assigned-identity: The ArmId of the user assigned identity that will be used to access the customer \
+managed key vault
+ - name: --key-vault-properties
+ short-summary: "Customer Key vault properties."
+ long-summary: |
+ Usage: --key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX
+
+ key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present.
+ key-identifier: Required. Key vault uri to access the encryption key.
+ identity-client-id: For future use - The client id of the identity which will be used to access key vault.
+ examples:
+ - name: Create Workspace
+ text: |-
+ az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace update'] = """
+ type: command
+ short-summary: "Updates a machine learning workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Update Workspace
+ text: |-
+ az machinelearningservices workspace update --description "new description" --friendly-name "New \
+friendly name" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace delete'] = """
+ type: command
+ short-summary: "Deletes a machine learning workspace."
+ examples:
+ - name: Delete Workspace
+ text: |-
+ az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-key'] = """
+ type: command
+ short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \
+insights and password for container registry."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace list-notebook-access-token'] = """
+ type: command
+    short-summary: "Return the notebook access token and refresh token for the workspace."
+ examples:
+      - name: List Workspace Notebook Access Token
+ text: |-
+ az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+--name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-notebook-key'] = """
+ type: command
+    short-summary: "List keys of the workspace notebook resource."
+ examples:
+      - name: List Workspace Notebook Keys
+ text: |-
+ az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace list-storage-account-key'] = """
+ type: command
+    short-summary: "List storage account keys of the workspace."
+ examples:
+      - name: List Workspace Storage Account Keys
+ text: |-
+ az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace prepare-notebook'] = """
+ type: command
+    short-summary: "Prepare the notebook resource for the workspace."
+ examples:
+ - name: Prepare Notebook
+ text: |-
+ az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace resync-key'] = """
+ type: command
+ short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \
+app insights and password for container registry."
+ examples:
+ - name: Resync Workspace Keys
+ text: |-
+ az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+created.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--created
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+deleted.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--deleted
+"""
+
+helps['machinelearningservices usage'] = """
+ type: group
+ short-summary: Manage usage with machinelearningservices
+"""
+
+helps['machinelearningservices usage list'] = """
+ type: command
+ short-summary: "Gets the current usage information as well as limits for AML resources for given subscription and \
+location."
+ examples:
+ - name: List Usages
+ text: |-
+ az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+ type: group
+ short-summary: Manage virtual machine size with machinelearningservices
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+ type: command
+ short-summary: "Returns supported VM Sizes in a location."
+ examples:
+ - name: List VM Sizes
+ text: |-
+ az machinelearningservices virtual-machine-size list --location "eastus"
+"""
+
+helps['machinelearningservices quota'] = """
+ type: group
+ short-summary: Manage quota with machinelearningservices
+"""
+
+helps['machinelearningservices quota list'] = """
+ type: command
+ short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+ examples:
+ - name: List workspace quotas by VMFamily
+ text: |-
+ az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+ type: command
+ short-summary: "Update quota for each VM family in workspace."
+ parameters:
+ - name: --value
+ short-summary: "The list for update quota."
+ long-summary: |
+ Usage: --value id=XX type=XX limit=XX unit=XX
+
+ id: Specifies the resource ID.
+ type: Specifies the resource type.
+ limit: The maximum permitted quota of the resource.
+ unit: An enum describing the unit of quota measurement.
+
+ Multiple actions can be specified by using more than one --value argument.
+ examples:
+ - name: update quotas
+ text: |-
+ az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServi\
+ces/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Ma\
+chineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 \
+unit="Count" --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0\
+000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standa\
+rd_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices compute'] = """
+ type: group
+ short-summary: Manage compute with machinelearningservices
+"""
+
+helps['machinelearningservices compute list'] = """
+ type: command
+ short-summary: "Gets computes in specified workspace."
+ examples:
+ - name: Get Computes
+ text: |-
+ az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute show'] = """
+ type: command
+ short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not \
+returned - use 'keys' nested resource to get them."
+ examples:
+      - name: Get an AKS Compute
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+      - name: Get an AML Compute
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+      - name: Get a ComputeInstance
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osT\
+ype\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"\
+minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0\
+0000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery\
+/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\
+\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with Schedules
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"schedules\\":{\\"\
+computeStartStop\\":[{\\"action\\":\\"Stop\\",\\"cron\\":{\\"expression\\":\\"0 18 * * *\\",\\"startTime\\":\\"2021-04-\
+23T01:30:00\\",\\"timeZone\\":\\"Pacific Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Cron\\"}]},\
+\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STA\
+NDARD_NC6\\"}}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute update'] = """
+ type: command
+ short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+ parameters:
+ - name: --scale-settings
+ short-summary: "Desired scale settings for the amlCompute."
+ long-summary: |
+ Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+ max-node-count: Required. Max number of nodes to use
+ min-node-count: Min number of nodes to use
+ node-idle-time-before-scale-down: Node Idle Time before scaling down amlCompute. This string needs to be \
+in the RFC Format.
+ examples:
+      - name: Update an AmlCompute Compute
+ text: |-
+ az machinelearningservices compute update --name "compute123" --scale-settings max-node-count=4 \
+min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute delete'] = """
+ type: command
+ short-summary: "Deletes specified Machine Learning compute."
+ examples:
+ - name: Delete Compute
+ text: |-
+ az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute list-key'] = """
+ type: command
+ short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc)."
+ examples:
+ - name: List AKS Compute Keys
+ text: |-
+ az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute list-node'] = """
+ type: command
+ short-summary: "Get the details (e.g IP address, port etc) of all the compute nodes in the compute."
+ examples:
+ - name: Get compute nodes information for a compute
+ text: |-
+ az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute restart'] = """
+ type: command
+ short-summary: "Posts a restart action to a compute instance."
+ examples:
+ - name: Restart ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute start'] = """
+ type: command
+ short-summary: "Posts a start action to a compute instance."
+ examples:
+ - name: Start ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute start --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute stop'] = """
+ type: command
+ short-summary: "Posts a stop action to a compute instance."
+ examples:
+ - name: Stop ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute update-schedule'] = """
+ type: command
+ short-summary: "Updates schedules of a compute instance."
+ examples:
+ - name: Update schedules of ComputeInstance
+ text: |-
+ az machinelearningservices compute update-schedule --name "compute123" --compute-start-stop \
+"[{\\"action\\":\\"Start\\",\\"recurrence\\":{\\"frequency\\":\\"Day\\",\\"interval\\":1,\\"schedule\\":{\\"hours\\":[1\
+8],\\"minutes\\":[30],\\"weekDays\\":null},\\"startTime\\":\\"2021-04-23T01:30:00\\",\\"timeZone\\":\\"Pacific \
+Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Recurrence\\"}]" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices compute is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+created.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+updated.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+deleted.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --deleted
+"""
+
+helps['machinelearningservices private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connection with machinelearningservices
+"""
+
+helps['machinelearningservices private-endpoint-connection list'] = """
+ type: command
+ short-summary: "List all the private endpoint connections associated with the workspace."
+ examples:
+ - name: StorageAccountListPrivateEndpointConnections
+ text: |-
+ az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceGetPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection create'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+ examples:
+ - name: WorkspacePutPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection update'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+"""
+
+helps['machinelearningservices private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceDeletePrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resource with machinelearningservices
+"""
+
+helps['machinelearningservices private-link-resource list'] = """
+ type: command
+ short-summary: "Gets the private link resources that need to be created for a workspace."
+ examples:
+ - name: WorkspaceListPrivateLinkResources
+ text: |-
+ az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+ type: group
+ short-summary: Manage workspace connection with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+ type: command
+    short-summary: "List all connections under an AML workspace."
+ examples:
+ - name: ListWorkspaceConnections
+ text: |-
+ az machinelearningservices workspace-connection list --category "ACR" --resource-group \
+"resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+ type: command
+ short-summary: "Get the detail of a workspace connection."
+ examples:
+ - name: GetWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+ type: command
+ short-summary: "Add a new workspace connection."
+ examples:
+ - name: CreateWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type \
+"PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+ type: command
+ short-summary: "Delete a workspace connection."
+ examples:
+ - name: DeleteWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+--resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices batch-endpoint'] = """
+ type: group
+ short-summary: Manage batch endpoint with machinelearningservices
+"""
+
+helps['machinelearningservices batch-endpoint list'] = """
+ type: command
+ short-summary: "Lists Batch inference endpoint in the workspace."
+ examples:
+ - name: List Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint list --count 1 --resource-group "resourceGroup-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-endpoint show'] = """
+ type: command
+ short-summary: "Gets a batch inference endpoint by name."
+ examples:
+ - name: Get Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint show --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-endpoint create'] = """
+ type: command
+ short-summary: "Creates a batch inference endpoint."
+ parameters:
+ - name: --keys
+ short-summary: "EndpointAuthKeys to set initially on an Endpoint. This property will always be returned as \
+null. AuthKey values must be retrieved using the ListKeys API."
+ long-summary: |
+ Usage: --keys primary-key=XX secondary-key=XX
+
+ primary-key: The primary key.
+ secondary-key: The secondary key.
+ examples:
+ - name: CreateOrUpdate Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","seconda\
+ryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+traffic={"myDeployment1":0,"myDeployment2":1} --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices batch-endpoint update'] = """
+ type: command
+ short-summary: "Update a batch inference endpoint."
+ examples:
+ - name: Update Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint update --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --endpoint-name "testBatchEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices batch-endpoint delete'] = """
+ type: command
+ short-summary: "Delete Batch Inference Endpoint."
+ examples:
+ - name: Delete Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint delete --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-endpoint list-key'] = """
+ type: command
+ short-summary: "Lists batch Inference Endpoint keys."
+ examples:
+ - name: ListKeys Batch Endpoint.
+ text: |-
+ az machinelearningservices batch-endpoint list-key --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-deployment'] = """
+ type: group
+ short-summary: Manage batch deployment with machinelearningservices
+"""
+
+helps['machinelearningservices batch-deployment list'] = """
+ type: command
+ short-summary: "Lists Batch inference deployments in the workspace."
+ examples:
+ - name: List Batch Deployment.
+ text: |-
+ az machinelearningservices batch-deployment list --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-deployment show'] = """
+ type: command
+ short-summary: "Gets a batch inference deployment by id."
+ examples:
+ - name: Get Batch Deployment.
+ text: |-
+ az machinelearningservices batch-deployment show --deployment-name "testBatchDeployment" \
+--endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-deployment create'] = """
+ type: command
+ short-summary: "Create a batch inference deployment."
+ parameters:
+ - name: --code-configuration
+ short-summary: "Code configuration for the endpoint deployment."
+ long-summary: |
+ Usage: --code-configuration code-id=XX scoring-script=XX
+
+ code-id: ARM resource ID of the code asset.
+ scoring-script: Required. The script to execute on startup. eg. "score.py"
+ - name: --data-path-asset-reference
+ short-summary: "Reference to an asset via its path in a datastore."
+ long-summary: |
+ Usage: --data-path-asset-reference datastore-id=XX path=XX reference-type=XX
+
+ datastore-id: ARM resource ID of the datastore where the asset is located.
+ path: The path of the file/directory in the datastore.
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --id-asset-reference
+ short-summary: "Reference to an asset via its ARM resource ID."
+ long-summary: |
+ Usage: --id-asset-reference asset-id=XX reference-type=XX
+
+ asset-id: Required. ARM resource ID of the asset.
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --output-path-asset-reference
+ short-summary: "Reference to an asset via its path in a job output."
+ long-summary: |
+ Usage: --output-path-asset-reference job-id=XX path=XX reference-type=XX
+
+ job-id: ARM resource ID of the job.
+ path: The path of the file/directory in the job output.
+ reference-type: Required. Specifies the type of asset reference.
+ - name: --output-configuration
+ short-summary: "Output configuration for the batch inference operation."
+ long-summary: |
+ Usage: --output-configuration append-row-file-name=XX output-action=XX
+
+ append-row-file-name: Customized output file name for append_row output action.
+ output-action: Indicates how the output will be organized.
+ - name: --retry-settings
+ short-summary: "Retry Settings for the batch inference operation."
+ long-summary: |
+ Usage: --retry-settings max-retries=XX timeout=XX
+
+ max-retries: Maximum retry count for a mini-batch
+ timeout: Invocation timeout for a mini-batch, in ISO 8601 format.
+ examples:
+ - name: CreateOrUpdate Batch Deployment.
+ text: |-
+ az machinelearningservices batch-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" codeConfiguration={"codeId":"/subscriptions/00000000-111\
+1-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testw\
+orkspace/codes/testcode/versions/1","scoringScript":"score.py"} compute={"instanceCount":0,"instanceType":"string","isL\
+ocal":false,"location":"string","properties":{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"\
+string"},"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Micr\
+osoft.MachineLearningServices/workspaces/testworkspace/computes/testcompute"} environmentId="/subscriptions/00000000-11\
+11-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/test\
+workspace/environments/myenv" environmentVariables={"additionalProp1":"string","additionalProp2":"string","additionalPr\
+op3":"string"} errorThreshold=0 loggingLevel="Info" miniBatchSize=0 model={"assetId":"/subscriptions/00000000-1111-2222\
+-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspa\
+ce/models/testmodel/versions/1","referenceType":"Id"} outputConfiguration={"appendRowFileName":"string","outputAction":\
+"SummaryOnly"} partitionKeys="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp\
+3":"string"} retrySettings={"maxRetries":0,"timeout":"string"} --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testBatchDeployment" --endpoint-name \
+"testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices batch-deployment update'] = """
+ type: command
+ short-summary: "Update a batch inference deployment."
+ examples:
+ - name: Update Batch Deployment.
+ text: |-
+ az machinelearningservices batch-deployment update --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testBatchDeployment" --endpoint-name \
+"testBatchEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices batch-deployment delete'] = """
+ type: command
+ short-summary: "Delete Batch Inference deployment."
+ examples:
+ - name: Delete Batch Deployment.
+ text: |-
+ az machinelearningservices batch-deployment delete --deployment-name "testBatchDeployment" \
+--endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-container'] = """
+ type: group
+ short-summary: Manage code container with machinelearningservices
+"""
+
+helps['machinelearningservices code-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Code Container.
+ text: |-
+ az machinelearningservices code-container list --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices code-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Code Container.
+ text: |-
+ az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Code Container.
+ text: |-
+ az machinelearningservices code-container create --name "testContainer" --properties \
+description="string" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices code-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices code-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Code Container.
+ text: |-
+ az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version'] = """
+ type: group
+ short-summary: Manage code version with machinelearningservices
+"""
+
+helps['machinelearningservices code-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Code Version.
+ text: |-
+ az machinelearningservices code-version list --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Code Version.
+ text: |-
+ az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version create'] = """
+ type: command
+ short-summary: "Create version."
+ examples:
+ - name: CreateOrUpdate Code Version.
+ text: |-
+ az machinelearningservices code-version create --name "testContainer" --properties \
+path="path/to/file.py" description="string" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGr\
+oups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" \
+isAnonymous=true properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices code-version update'] = """
+ type: command
+ short-summary: "Update version."
+"""
+
+helps['machinelearningservices code-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Code Version.
+ text: |-
+ az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices data-container'] = """
+ type: group
+ short-summary: Manage data container with machinelearningservices
+"""
+
+helps['machinelearningservices data-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Data Container.
+ text: |-
+ az machinelearningservices data-container list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices data-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Data Container.
+ text: |-
+ az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Data Container.
+ text: |-
+ az machinelearningservices data-container create --name "datacontainer123" --properties \
+description="string" properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} \
+--resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices data-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Data Container.
+ text: |-
+ az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version'] = """
+ type: group
+ short-summary: Manage data version with machinelearningservices
+"""
+
+helps['machinelearningservices data-version list'] = """
+ type: command
+ short-summary: "List data versions."
+ examples:
+ - name: List Data Version.
+ text: |-
+ az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Data Version.
+ text: |-
+ az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version \
+"1" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version create'] = """
+ type: command
+ short-summary: "Create version."
+ examples:
+ - name: CreateOrUpdate Data Version.
+ text: |-
+ az machinelearningservices data-version create --name "dataset123" --properties path="path/to/file.csv" \
+description="string" datasetType="Simple" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGrou\
+ps/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" \
+isAnonymous=true properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices data-version update'] = """
+ type: command
+ short-summary: "Update version."
+"""
+
+helps['machinelearningservices data-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Data Version.
+ text: |-
+ az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" \
+--version "1" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices datastore'] = """
+ type: group
+ short-summary: Manage datastore with machinelearningservices
+"""
+
+helps['machinelearningservices datastore list'] = """
+ type: command
+ short-summary: "List datastores."
+ examples:
+ - name: List datastores.
+ text: |-
+ az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore show'] = """
+ type: command
+ short-summary: "Get datastore."
+ examples:
+ - name: Get datastore.
+ text: |-
+ az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore create'] = """
+ type: command
+ short-summary: "Create datastore."
+ parameters:
+ - name: --linked-info
+ short-summary: "Information about the datastore origin, if linked."
+ long-summary: |
+ Usage: --linked-info linked-id=XX linked-resource-name=XX origin=XX
+
+ linked-id: Linked service ID.
+ linked-resource-name: Linked service resource name.
+ origin: Type of the linked service.
+ examples:
+ - name: CreateOrUpdate datastore (Azure Data Lake Gen1 w/ ServicePrincipal).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzureDataLakeGen1","credentials":{"authorityUrl":"string","clientId":"00000000-1111-2222-3333\
+-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","secrets":{"clientSecret":"string","secretsT\
+ype":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"storeName":"testStore"} isDefault=true \
+linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string\
+","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","a\
+dditionalProp3":"string"} --resource-group "testrg123" --workspace-name "testworkspace"
+ - name: CreateOrUpdate datastore (Azure Data Lake Gen2 w/ Service Principal).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"authorityUrl":"str\
+ing","clientId":"00000000-1111-2222-3333-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","sec\
+rets":{"clientSecret":"string","secretsType":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"en\
+dpoint":"core.windows.net","protocol":"https"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"str\
+ing","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: CreateOrUpdate datastore (Azure File store w/ AccountKey).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureFile","credentials":{"credentialsType":"\
+AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} \
+isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: CreateOrUpdate datastore (Azure Postgre SQL w/ SQL Admin).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzurePostgreSql","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string","\
+secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","enableSSL":true,"endpoint":"string","portNumber":1\
+23,"serverName":"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synaps\
+e"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: CreateOrUpdate datastore (Azure SQL Database w/ SQL Admin).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzureSqlDatabase","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string",\
+"secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","endpoint":"string","portNumber":123,"serverName":\
+"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+ - name: CreateOrUpdate datastore (AzureBlob w/ AccountKey).
+ text: |-
+ az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"credentialsType":"\
+AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} \
+isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore update'] = """
+ type: command
+ short-summary: "Update datastore."
+ parameters:
+ - name: --linked-info
+ short-summary: "Information about the datastore origin, if linked."
+ long-summary: |
+ Usage: --linked-info linked-id=XX linked-resource-name=XX origin=XX
+
+ linked-id: Linked service ID.
+ linked-resource-name: Linked service resource name.
+ origin: Type of the linked service.
+"""
+
+helps['machinelearningservices datastore delete'] = """
+ type: command
+ short-summary: "Delete datastore."
+ examples:
+ - name: Delete datastore.
+ text: |-
+ az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices datastore list-secret'] = """
+ type: command
+ short-summary: "Get datastore secrets."
+ examples:
+ - name: Get datastore secrets.
+ text: |-
+ az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container'] = """
+ type: group
+ short-summary: Manage environment container with machinelearningservices
+"""
+
+helps['machinelearningservices environment-container list'] = """
+ type: command
+ short-summary: "List containers."
+ examples:
+ - name: List Environment Container.
+ text: |-
+ az machinelearningservices environment-container list --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices environment-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Environment Container.
+ text: |-
+ az machinelearningservices environment-container show --name "testEnvironment" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Environment Container.
+ text: |-
+ az machinelearningservices environment-container create --name "testEnvironment" --properties \
+description="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices environment-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Environment Container.
+ text: |-
+ az machinelearningservices environment-container delete --name "testContainer" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version'] = """
+ type: group
+ short-summary: Manage environment specification version with machinelearningservices
+"""
+
+helps['machinelearningservices environment-specification-version list'] = """
+ type: command
+ short-summary: "List versions."
+ examples:
+ - name: List Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version list --name "testEnvironment" \
+--resource-group "testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version show --name "testEnvironment" \
+--resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version create'] = """
+ type: command
+ short-summary: "Create an EnvironmentSpecificationVersion."
+ parameters:
+ - name: --docker-build
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-build context=XX dockerfile=XX docker-specification-type=XX operating-system-type=XX
+
+ context: Path to a snapshot of the Docker Context. This property is only valid if Dockerfile is specified. \
+The path is relative to the asset path which must contain a single Blob URI value.
+ dockerfile: Required. Docker command line instructions to assemble an image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --docker-image
+        short-summary: "Class to represent configuration settings for Docker Image"
+ long-summary: |
+ Usage: --docker-image docker-image-uri=XX docker-specification-type=XX operating-system-type=XX
+
+ docker-image-uri: Required. Image name of a custom base image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --liveness-route
+ short-summary: "The route to check the liveness of the inference server container."
+ long-summary: |
+ Usage: --liveness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --readiness-route
+ short-summary: "The route to check the readiness of the inference server container."
+ long-summary: |
+ Usage: --readiness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --scoring-route
+ short-summary: "The port to send the scoring requests to, within the inference server container."
+ long-summary: |
+ Usage: --scoring-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ examples:
+ - name: CreateOrUpdate Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version create --name "testEnvironment" \
+--properties description="string" condaFile="channels:\\n- defaults\\ndependencies:\\n- python=3.7.7\\nname: my-env" \
+docker={"dockerSpecificationType":"Build","dockerfile":"FROM myimage"} properties={"additionalProp1":"string","addition\
+alProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalPr\
+op3":"string"} --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices environment-specification-version update'] = """
+ type: command
+ short-summary: "Update an EnvironmentSpecificationVersion."
+ parameters:
+ - name: --docker-build
+ short-summary: "Class to represent configuration settings for Docker Build"
+ long-summary: |
+ Usage: --docker-build context=XX dockerfile=XX docker-specification-type=XX operating-system-type=XX
+
+ context: Path to a snapshot of the Docker Context. This property is only valid if Dockerfile is specified. \
+The path is relative to the asset path which must contain a single Blob URI value.
+ dockerfile: Required. Docker command line instructions to assemble an image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --docker-image
+        short-summary: "Class to represent configuration settings for Docker Image"
+ long-summary: |
+ Usage: --docker-image docker-image-uri=XX docker-specification-type=XX operating-system-type=XX
+
+ docker-image-uri: Required. Image name of a custom base image.
+ docker-specification-type: Required. Docker specification must be either Build or Image
+            operating-system-type: The OS type of the Environment.
+ - name: --liveness-route
+ short-summary: "The route to check the liveness of the inference server container."
+ long-summary: |
+ Usage: --liveness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --readiness-route
+ short-summary: "The route to check the readiness of the inference server container."
+ long-summary: |
+ Usage: --readiness-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+ - name: --scoring-route
+ short-summary: "The port to send the scoring requests to, within the inference server container."
+ long-summary: |
+ Usage: --scoring-route path=XX port=XX
+
+ path: Required. The path for the route.
+ port: Required. The port for the route.
+"""
+
+helps['machinelearningservices environment-specification-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Environment Specification Version.
+ text: |-
+ az machinelearningservices environment-specification-version delete --name "testContainer" \
+--resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job'] = """
+ type: group
+ short-summary: Manage job with machinelearningservices
+"""
+
+helps['machinelearningservices job list'] = """
+ type: command
+ short-summary: "Lists Jobs in the workspace."
+ examples:
+ - name: List Command Job.
+ text: |-
+ az machinelearningservices job list --job-type "Command" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+ - name: List Sweep Job.
+ text: |-
+ az machinelearningservices job list --job-type "Sweep" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices job show'] = """
+ type: command
+ short-summary: "Gets a Job by name/id."
+ examples:
+ - name: Get Command Job.
+ text: |-
+ az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+ - name: Get Sweep Job.
+ text: |-
+ az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices job create'] = """
+ type: command
+ short-summary: "Creates and executes a Job."
+ examples:
+ - name: CreateOrUpdate Command Job.
+ text: |-
+ az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"codeId\\":\\"/sub\
+scriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningSe\
+rvices/workspaces/testworkspace/codes/mycode/versions/1\\",\\"command\\":\\"python file.py \
+test\\",\\"compute\\":{\\"instanceCount\\":1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resour\
+ceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"}\
+,\\"distribution\\":{\\"distributionType\\":\\"PyTorch\\",\\"processCount\\":2},\\"environmentId\\":\\"/subscriptions/0\
+0000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/works\
+paces/testworkspace/environments/AzureML-Tutorial/versions/1\\",\\"environmentVariables\\":{\\"MY_ENV_VAR1\\":\\"string\
+\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"experimentName\\":\\"myExperiment\\",\\"identity\\":{\\"identityType\\":\\"AMLTo\
+ken\\"},\\"inputDataBindings\\":{\\"test\\":{\\"dataId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resour\
+ceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/data/mydataset/version\
+s/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"jobType\\":\\"Command\\",\\"outputDataBindings\\":{\\"test\\":{\\\
+"datastoreId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Micr\
+osoft.MachineLearningServices/workspaces/testworkspace/datastore/mydatastore\\",\\"pathOnCompute\\":\\"path/on/compute\
+\\"}},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"\
+string\\"},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"s\
+tring\\"},\\"timeout\\":\\"PT1M\\"}" --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+ - name: CreateOrUpdate Sweep Job.
+ text: |-
+ az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"algorithm\\":\\"G\
+rid\\",\\"compute\\":{\\"instanceCount\\":1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourc\
+eGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"},\
+\\"identity\\":{\\"identityType\\":\\"AMLToken\\"},\\"jobType\\":\\"Sweep\\",\\"maxConcurrentTrials\\":1,\\"maxTotalTri\
+als\\":1,\\"objective\\":{\\"goal\\":\\"Minimize\\",\\"primaryMetric\\":\\"string\\"},\\"properties\\":{\\"additionalPr\
+op1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"searchSpace\\":{\\"name\\\
+":{}},\\"tags\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\
+\\"},\\"timeout\\":\\"PT1M\\",\\"trial\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resource\
+Groups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/mycode/versions/1\
+\\",\\"command\\":\\"python file.py test\\",\\"distribution\\":{\\"distributionType\\":\\"PyTorch\\",\\"processCount\\"\
+:2},\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/provid\
+ers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/AzureML-Tutorial/versions/1\\",\\"environme\
+ntVariables\\":{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"inputDataBindings\\":{\\"test\\":{\\"\
+dataId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.\
+MachineLearningServices/workspaces/testworkspace/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"\
+}},\\"outputDataBindings\\":{\\"test\\":{\\"datastoreId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resou\
+rceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastore/mydatastore\
+\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"timeout\\":\\"PT1M\\"}}" --id "testJob" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices job update'] = """
+ type: command
+    short-summary: "Updates and executes a Job."
+"""
+
+helps['machinelearningservices job delete'] = """
+ type: command
+ short-summary: "Deletes a Job (asynchronous)."
+ examples:
+ - name: Delete Job.
+ text: |-
+ az machinelearningservices job delete --id "testJob" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices job cancel'] = """
+ type: command
+ short-summary: "Cancels a Job."
+ examples:
+ - name: Cancel Job.
+ text: |-
+ az machinelearningservices job cancel --id "testJob" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices job wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices job is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices job is successfully deleted.
+ text: |-
+ az machinelearningservices job wait --id "testJob" --resource-group "testrg123" --workspace-name \
+"testworkspace" --deleted
+"""
+
+helps['machinelearningservices labeling-job'] = """
+ type: group
+ short-summary: Manage labeling job with machinelearningservices
+"""
+
+helps['machinelearningservices labeling-job list'] = """
+ type: command
+ short-summary: "Lists labeling jobs in the workspace."
+ examples:
+ - name: List Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job list --count "10" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job show'] = """
+ type: command
+ short-summary: "Gets a labeling job by name/id."
+ examples:
+ - name: Get Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job create'] = """
+ type: command
+ short-summary: "Create a labeling job (asynchronous)."
+ parameters:
+ - name: --dataset-configuration
+ short-summary: "Configuration of dataset used in the job."
+ long-summary: |
+ Usage: --dataset-configuration asset-name=XX dataset-version=XX incremental-dataset-refresh-enabled=XX
+
+ asset-name: Name of the data asset to perform labeling.
+ dataset-version: AML dataset version.
+ incremental-dataset-refresh-enabled: Indicates whether to enable incremental dataset refresh.
+ - name: --labeling-job-image-properties
+ short-summary: "Properties of a labeling job for image data"
+ long-summary: |
+ Usage: --labeling-job-image-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of image labeling job.
+ media-type: Required. Media type of the job.
+ - name: --labeling-job-text-properties
+ short-summary: "Properties of a labeling job for text data"
+ long-summary: |
+ Usage: --labeling-job-text-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of text labeling job.
+ media-type: Required. Media type of the job.
+ examples:
+ - name: CreateOrUpdate Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job create --properties description="string" \
+datasetConfiguration={"assetName":"myAsset","datasetVersion":"1","incrementalDatasetRefreshEnabled":true} \
+jobInstructions={"uri":"link/to/instructions"} jobType="Labeling" labelCategories={"myCategory1":{"allowMultiSelect":tr\
+ue,"classes":{"myLabelClass1":{"displayName":"myLabelClass1","subclasses":{}},"myLabelClass2":{"displayName":"myLabelCl\
+ass2","subclasses":{}}},"displayName":"myCategory1Title"},"myCategory2":{"allowMultiSelect":true,"classes":{"myLabelCla\
+ss1":{"displayName":"myLabelClass1","subclasses":{}},"myLabelClass2":{"displayName":"myLabelClass2","subclasses":{}}},"\
+displayName":"myCategory2Title"}} labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingC\
+omputeBinding":{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resource\
+Group-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/myscoringcompute"},"mlAssistEn\
+abled":true,"trainingComputeBinding":{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/r\
+esourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mytraini\
+ngcompute"}} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --id "testLabelingJob" \
+--resource-group "workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job update'] = """
+ type: command
+ short-summary: "Update a labeling job (asynchronous)."
+ parameters:
+ - name: --dataset-configuration
+ short-summary: "Configuration of dataset used in the job."
+ long-summary: |
+ Usage: --dataset-configuration asset-name=XX dataset-version=XX incremental-dataset-refresh-enabled=XX
+
+ asset-name: Name of the data asset to perform labeling.
+ dataset-version: AML dataset version.
+ incremental-dataset-refresh-enabled: Indicates whether to enable incremental dataset refresh.
+ - name: --labeling-job-image-properties
+ short-summary: "Properties of a labeling job for image data"
+ long-summary: |
+ Usage: --labeling-job-image-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of image labeling job.
+ media-type: Required. Media type of the job.
+ - name: --labeling-job-text-properties
+ short-summary: "Properties of a labeling job for text data"
+ long-summary: |
+ Usage: --labeling-job-text-properties annotation-type=XX media-type=XX
+
+ annotation-type: Annotation type of text labeling job.
+ media-type: Required. Media type of the job.
+"""
+
+helps['machinelearningservices labeling-job delete'] = """
+ type: command
+ short-summary: "Delete a labeling job."
+ examples:
+ - name: Delete Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job export-label'] = """
+ type: command
+ short-summary: "Export labels from a labeling job (asynchronous)."
+ parameters:
+ - name: --coco-export-summary
+ long-summary: |
+ Usage: --coco-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ - name: --csv-export-summary
+ long-summary: |
+ Usage: --csv-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ - name: --dataset-export-summary
+ long-summary: |
+ Usage: --dataset-export-summary format=XX
+
+ format: Required. The format of exported labels, also as the discriminator.
+ examples:
+ - name: ExportLabels Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group \
+"workspace-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job pause'] = """
+ type: command
+ short-summary: "Pause a labeling job."
+ examples:
+ - name: Pause Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job resume'] = """
+ type: command
+ short-summary: "Resume a labeling job (asynchronous)."
+ examples:
+ - name: Resume Labeling Job.
+ text: |-
+ az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices labeling-job wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices labeling-job is \
+met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices labeling-job is successfully \
+created.
+ text: |-
+ az machinelearningservices labeling-job wait --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices labeling-job is successfully \
+updated.
+ text: |-
+ az machinelearningservices labeling-job wait --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace" --updated
+"""
+
+helps['machinelearningservices model-container'] = """
+ type: group
+ short-summary: Manage model container with machinelearningservices
+"""
+
+helps['machinelearningservices model-container list'] = """
+ type: command
+ short-summary: "List model containers."
+ examples:
+ - name: List Model Container.
+ text: |-
+ az machinelearningservices model-container list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices model-container show'] = """
+ type: command
+ short-summary: "Get container."
+ examples:
+ - name: Get Model Container.
+ text: |-
+ az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-container create'] = """
+ type: command
+ short-summary: "Create container."
+ examples:
+ - name: CreateOrUpdate Model Container.
+ text: |-
+ az machinelearningservices model-container create --name "testContainer" --properties \
+description="Model container description" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-container update'] = """
+ type: command
+ short-summary: "Update container."
+"""
+
+helps['machinelearningservices model-container delete'] = """
+ type: command
+ short-summary: "Delete container."
+ examples:
+ - name: Delete Model Container.
+ text: |-
+ az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version'] = """
+ type: group
+ short-summary: Manage model version with machinelearningservices
+"""
+
+helps['machinelearningservices model-version list'] = """
+ type: command
+ short-summary: "List model versions."
+ examples:
+ - name: List Model Version.
+ text: |-
+ az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version show'] = """
+ type: command
+ short-summary: "Get version."
+ examples:
+ - name: Get Model Version.
+ text: |-
+ az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" \
+--version "1" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices model-version create'] = """
+ type: command
+ short-summary: "Create version."
+ examples:
+ - name: CreateOrUpdate Model Version.
+ text: |-
+ az machinelearningservices model-version create --name "testContainer" --properties \
+path="path/in/datastore" description="Model version description" datastoreId="/subscriptions/00000000-1111-2222-3333-44\
+4444444444/resourceGroups/testrg123/providers/Microsoft.MachineLearningServices/workspaces/workspace123/datastores/data\
+store123" flavors={"python_function":{"data":{"loader_module":"myLoaderModule"}}} properties={"prop1":"value1","prop2":\
+"value2"} tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "1" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices model-version update'] = """
+ type: command
+ short-summary: "Update version."
+"""
+
+helps['machinelearningservices model-version delete'] = """
+ type: command
+ short-summary: "Delete version."
+ examples:
+ - name: Delete Model Version.
+ text: |-
+ az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" \
+--version "999" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint'] = """
+ type: group
+ short-summary: Manage online endpoint with machinelearningservices
+"""
+
+helps['machinelearningservices online-endpoint list'] = """
+ type: command
+ short-summary: "List Online Endpoints."
+ examples:
+ - name: List Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices online-endpoint show'] = """
+ type: command
+ short-summary: "Get Online Endpoint."
+ examples:
+ - name: Get Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint create'] = """
+ type: command
+ short-summary: "Create Online Endpoint (asynchronous)."
+ parameters:
+ - name: --keys
+ short-summary: "EndpointAuthKeys to set initially on an Endpoint. This property will always be returned as \
+null. AuthKey values must be retrieved using the ListKeys API."
+ long-summary: |
+ Usage: --keys primary-key=XX secondary-key=XX
+
+ primary-key: The primary key.
+ secondary-key: The secondary key.
+ examples:
+ - name: CreateOrUpdate Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","seconda\
+ryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+target="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.Machi\
+neLearningServices/workspaces/testworkspace/computes/compute123" traffic={"myDeployment1":0,"myDeployment2":1} --tags \
+additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint update'] = """
+ type: command
+ short-summary: "Update Online Endpoint (asynchronous)."
+ examples:
+ - name: Update Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --traffic myDeployment1=0 myDeployment2=1 --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint delete'] = """
+ type: command
+ short-summary: "Delete Online Endpoint (asynchronous)."
+ examples:
+ - name: Delete Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint get-token'] = """
+ type: command
+ short-summary: "Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication."
+ examples:
+ - name: GetToken Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint list-key'] = """
+ type: command
+ short-summary: "List EndpointAuthKeys for an Endpoint using Key-based authentication."
+ examples:
+ - name: ListKeys Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint regenerate-key'] = """
+ type: command
+ short-summary: "Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous)."
+ examples:
+ - name: RegenerateKeys Online Endpoint.
+ text: |-
+ az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" \
+--endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-endpoint wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices online-endpoint \
+is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully created.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully updated.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices online-endpoint is \
+successfully deleted.
+ text: |-
+ az machinelearningservices online-endpoint wait --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123" --deleted
+"""
+
+helps['machinelearningservices online-deployment'] = """
+ type: group
+ short-summary: Manage online deployment with machinelearningservices
+"""
+
+helps['machinelearningservices online-deployment list'] = """
+ type: command
+ short-summary: "List Inference Endpoint Deployments."
+ examples:
+ - name: List Online Deployments.
+ text: |-
+ az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment show'] = """
+ type: command
+    short-summary: "Get Inference Endpoint Deployment."
+ examples:
+ - name: Get K8S Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+ - name: Get Managed Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment create'] = """
+ type: command
+ short-summary: "Create Inference Endpoint Deployment (asynchronous)."
+ examples:
+ - name: CreateOrUpdate K8S Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfigu\
+ration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/provid\
+ers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"strin\
+g\\"},\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memoryInGBLimit\\":64},\
+\\"endpointComputeType\\":\\"K8S\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resource\
+Groups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/env123\\",\
+\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshol\
+d\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/res\
+ourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/model123\\",\
+\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\
+\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurrentReque\
+stsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"pollingInter\
+val\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+ - name: CreateOrUpdate Managed Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfigu\
+ration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/provid\
+ers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"strin\
+g\\"},\\"endpointComputeType\\":\\"Managed\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-44444444444\
+4/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/e\
+nv123\\",\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"succes\
+sThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-4444444\
+44444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/mod\
+el123\\",\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"st\
+ring\\",\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurr\
+entRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"poll\
+ingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" --tags \
+additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" \
+--endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment update'] = """
+ type: command
+ short-summary: "Update Online Deployment (asynchronous)."
+ examples:
+ - name: Update K8S Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --properties "{\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memory\
+InGBLimit\\":64},\\"endpointComputeType\\":\\"K8S\\",\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\
+\\":\\"Auto\\"}}" --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+ - name: Update Managed Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --properties "{\\"endpointComputeType\\":\\"Managed\\",\\"readinessProbe\\":{\\"failureThreshold\\":50,\\"init\
+ialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"scaleSettings\\":\
+{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}" --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment delete'] = """
+ type: command
+ short-summary: "Delete Inference Endpoint Deployment (asynchronous)."
+ examples:
+ - name: Delete Online Deployment.
+ text: |-
+ az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+"""
+
+helps['machinelearningservices online-deployment get-log'] = """
+ type: command
+ short-summary: "Polls an Endpoint operation."
+ examples:
+ - name: Get Online Deployment Logs.
+ text: |-
+ az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 \
+--deployment-name "testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name \
+"workspace123"
+"""
+
+helps['machinelearningservices online-deployment wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices online-deployment \
+is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully created.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully updated.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices online-deployment is \
+successfully deleted.
+ text: |-
+ az machinelearningservices online-deployment wait --deployment-name "testDeployment" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123" --deleted
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+ type: group
+ short-summary: Manage workspace feature with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+ type: command
+ short-summary: "Lists all enabled features for a workspace."
+ examples:
+ - name: List Workspace features
+ text: |-
+ az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices workspace-sku'] = """
+ type: group
+ short-summary: Manage workspace sku with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-sku list'] = """
+ type: command
+ short-summary: "Lists all skus with associated features."
+ examples:
+ - name: List Skus
+ text: |-
+ az machinelearningservices workspace-sku list
+"""
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
new file mode 100644
index 00000000000..729f897df88
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
@@ -0,0 +1,1300 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_three_state_flag,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import (
+ get_default_location_from_resource_group,
+ validate_file_or_dict
+)
+from azext_machinelearningservices.action import (
+ AddSku,
+ AddSharedPrivateLinkResources,
+ AddIdentity,
+ AddKeyVaultProperties,
+ AddValue,
+ AddScaleSettings,
+ AddPrivateLinkServiceConnectionState,
+ AddKeys,
+ AddBatchendpointsProperties,
+ AddMachinelearningservicesBatchEndpointCreateTraffic,
+ AddMachinelearningservicesBatchEndpointUpdateTraffic,
+ AddCodeConfiguration,
+ AddEnvironmentVariables,
+ AddDataPathAssetReference,
+ AddIdAssetReference,
+ AddOutputPathAssetReference,
+ AddOutputConfiguration,
+ AddBatchdeploymentsProperties,
+ AddRetrySettings,
+ AddComputeConfigurationProperties,
+ AddCodecontainersProperties,
+ AddCodeversionsProperties,
+ AddDatacontainersProperties,
+ AddDataversionsProperties,
+ AddLinkedInfo,
+ AddDatastoresProperties,
+ AddEnvironmentcontainersProperties,
+ AddDockerBuild,
+ AddDockerImage,
+ AddEnvironmentspecificationversionsProperties,
+ AddLivenessRoute,
+ AddDatasetConfiguration,
+ AddLabelingJobImageProperties,
+ AddLabelingJobTextProperties,
+ AddLabelingjobsProperties,
+ AddCocoExportSummary,
+ AddCsvExportSummary,
+ AddDatasetExportSummary,
+ AddModelcontainersProperties,
+ AddModelversionsProperties,
+ AddProperties,
+ AddMachinelearningservicesOnlineEndpointCreateTraffic,
+ AddMachinelearningservicesOnlineEndpointUpdateTraffic
+)
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('machinelearningservices workspace list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices workspace show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+        c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+ c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+ 'changed once the workspace has been created')
+ c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+ 'This cannot be changed once the workspace has been created')
+ c.argument('discovery_url', type=str, help='Url for the discovery service to identify regional endpoints for '
+ 'machine learning experimentation services')
+ c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+ 'workspace and reduce diagnostic data collected by the service')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+ 'whether to allow public access when behind VNet.')
+ c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='+', help='The list of '
+ 'shared private link resources in this workspace.')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not the '
+ 'encryption is enabled for the workspace.', arg_group='Encryption')
+ c.argument('identity', action=AddIdentity, nargs='+', help='The identity that will be used to access the key '
+ 'vault for encryption at rest.', arg_group='Encryption')
+ c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='Customer Key vault '
+ 'properties.', arg_group='Encryption')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices workspace update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices workspace delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-notebook-access-token') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-notebook-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-storage-account-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace prepare-notebook') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace resync-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices usage list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices virtual-machine-size list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota update') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
+ c.argument('value', action=AddValue, nargs='+', help='The list for update quota.')
+ c.argument('quota_update_parameters_location', type=str, help='Region of workspace quota to be updated.',
+ id_part='name')
+
+ with self.argument_context('machinelearningservices compute list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices compute show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+        c.argument('properties', type=validate_file_or_dict, help='Compute properties. Expected value: '
+ 'json-string/@json-file.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices compute update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+ c.argument('scale_settings', action=AddScaleSettings, nargs='+', help='Desired scale settings for the '
+ 'amlCompute.')
+
+ with self.argument_context('machinelearningservices compute delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+ c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the '
+ 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.')
+
+ with self.argument_context('machinelearningservices compute list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices compute list-node') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices compute restart') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute start') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute stop') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute update-schedule') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+ c.argument('compute_start_stop', type=validate_file_or_dict, help='The list of compute start stop schedules to '
+ 'be applied. Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices compute wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.ignore('properties')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam'
+ 'e'], type=str, help='The name of the private '
+ 'endpoint connection associated with the workspace', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-link-resource list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('auth_type', type=str, help='Authorization type of the workspace connection.')
+ c.argument('value', type=str, help='Value details of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices batch-endpoint list') as c:
+ c.argument('count', type=int, help='Number of endpoints to be retrieved in a page of results.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices batch-endpoint show') as c:
+ c.argument('endpoint_name', type=str, help='Name for the Batch Endpoint.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices batch-endpoint create') as c:
+ c.argument('endpoint_name', type=str, help='Name for the Batch inference endpoint.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('auth_mode', arg_type=get_enum_type(['AMLToken', 'Key', 'AADToken']), help='Enum to determine '
+ 'endpoint authentication mode.')
+ c.argument('description', type=str, help='Description of the inference endpoint.')
+ c.argument('keys', action=AddKeys, nargs='+', help='EndpointAuthKeys to set initially on an Endpoint. This '
+ 'property will always be returned as null. AuthKey values must be retrieved using the ListKeys API.')
+ c.argument('properties', action=AddBatchendpointsProperties, nargs='+', help='Property dictionary. Properties '
+ 'can be added, but not removed or altered. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('traffic', action=AddMachinelearningservicesBatchEndpointCreateTraffic, nargs='+', help='Traffic '
+ 'rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 KEY2=VALUE2 '
+ '...')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices batch-endpoint update') as c:
+ c.argument('endpoint_name', type=str, help='Name for the Batch inference endpoint.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('traffic', action=AddMachinelearningservicesBatchEndpointUpdateTraffic, nargs='+', help='Traffic '
+ 'rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 KEY2=VALUE2 '
+ '...')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices batch-endpoint delete') as c:
+ c.argument('endpoint_name', type=str, help='Inference Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices batch-endpoint list-key') as c:
+ c.argument('endpoint_name', type=str, help='Inference Endpoint name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices batch-deployment list') as c:
+ c.argument('endpoint_name', type=str, help='Endpoint name')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Top of list.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices batch-deployment show') as c:
+ c.argument('endpoint_name', type=str, help='Endpoint name', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='The identifier for the Batch deployments.',
+ id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices batch-deployment create') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name')
+ c.argument('deployment_name', type=str, help='The identifier for the Batch inference deployment.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('code_configuration', action=AddCodeConfiguration, nargs='+', help='Code configuration for the '
+ 'endpoint deployment.')
+ c.argument('description', type=str, help='Description of the endpoint deployment.')
+ c.argument('environment_id', type=str, help='ARM resource ID of the environment specification for the endpoint '
+ 'deployment.')
+ c.argument('environment_variables', action=AddEnvironmentVariables, nargs='+', help='Environment variables '
+ 'configuration for the deployment. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ # --- batch-deployment create (continued): batch-inference tuning, model references, compute and identity ---
+ c.argument('error_threshold', type=int, help='Error threshold, if the error count for the entire input goes '
+ 'above this value, the batch inference will be aborted. Range is [-1, int.MaxValue]. For '
+ 'FileDataset, this value is the count of file failures. For TabularDataset, this value is the count '
+ 'of record failures. If set to -1 (the lower bound), all failures during batch inference will be '
+ 'ignored.')
+ c.argument('logging_level', arg_type=get_enum_type(['Info', 'Warning', 'Debug']), help='Logging level for '
+ 'batch inference operation.')
+ c.argument('mini_batch_size', type=int, help='Size of the mini-batch passed to each batch invocation. For '
+ 'FileDataset, this is the number of files per mini-batch. For TabularDataset, this is the size of '
+ 'the records in bytes, per mini-batch.')
+ # Three alternative ways to reference the deployed model asset, grouped under 'Model'.
+ c.argument('data_path_asset_reference', action=AddDataPathAssetReference, nargs='+', help='Reference to an '
+ 'asset via its path in a datastore.', arg_group='Model')
+ c.argument('id_asset_reference', action=AddIdAssetReference, nargs='+', help='Reference to an asset via its '
+ 'ARM resource ID.', arg_group='Model')
+ c.argument('output_path_asset_reference', action=AddOutputPathAssetReference, nargs='+', help='Reference to an '
+ 'asset via its path in a job output.', arg_group='Model')
+ c.argument('output_configuration', action=AddOutputConfiguration, nargs='+', help='Output configuration for '
+ 'the batch inference operation.')
+ c.argument('partition_keys', nargs='+', help='Partition keys list used for Named partitioning.')
+ c.argument('properties', action=AddBatchdeploymentsProperties, nargs='+', help='Property dictionary. '
+ 'Properties can be added, but not removed or altered. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('retry_settings', action=AddRetrySettings, nargs='+', help='Retry Settings for the batch inference '
+ 'operation.')
+ # Compute configuration for the deployment, grouped under 'Compute'.
+ c.argument('instance_count', type=int, help='Number of instances or nodes.', arg_group='Compute')
+ c.argument('instance_type', type=str, help='SKU type to run on.', arg_group='Compute')
+ c.argument('is_local', arg_type=get_three_state_flag(), help='Set to true for jobs running on local compute.',
+ arg_group='Compute')
+ c.argument('compute_configuration_location', type=str, help='Location for virtual cluster run.',
+ arg_group='Compute')
+ c.argument('compute_configuration_properties', action=AddComputeConfigurationProperties, nargs='+',
+ help='Additional properties. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...', arg_group='Compute')
+ c.argument('target', type=str, help='ARM resource ID of the compute resource.', arg_group='Compute')
+ # Managed-identity settings, grouped under 'Identity'. 'type_' is exposed as --type.
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ # Arguments for `batch-deployment update`. id_part values let the command resolve --ids-style input.
+ with self.argument_context('machinelearningservices batch-deployment update') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='The identifier for the Batch inference deployment.',
+ id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('description', type=str, help='Description of the endpoint deployment.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ # Arguments for `batch-deployment delete`.
+ with self.argument_context('machinelearningservices batch-deployment delete') as c:
+ c.argument('endpoint_name', type=str, help='Endpoint name', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference deployment identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the code-container CRUD commands.
+ with self.argument_context('machinelearningservices code-container list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices code-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices code-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddCodecontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices code-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddCodecontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices code-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the code-version CRUD commands (a version lives under a code container).
+ with self.argument_context('machinelearningservices code-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices code-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices code-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddCodeversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices code-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddCodeversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices code-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the data-container CRUD commands.
+ with self.argument_context('machinelearningservices data-container list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices data-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices data-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddDatacontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices data-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddDatacontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices data-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the data-version CRUD commands (a version lives under a data container).
+ with self.argument_context('machinelearningservices data-version list') as c:
+ c.argument('name', type=str, help='Data name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('tags', tags_type)
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices data-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices data-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('dataset_type', arg_type=get_enum_type(['Simple', 'Dataflow']), help='The Format of dataset.')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddDataversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices data-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('dataset_type', arg_type=get_enum_type(['Simple', 'Dataflow']), help='The Format of dataset.')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddDataversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices data-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the datastore commands; list supports server-side filtering and ordering.
+ with self.argument_context('machinelearningservices datastore list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Maximum number of results to return.')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Filter down to the workspace default '
+ 'datastore.')
+ c.argument('names', nargs='+', help='Names of datastores to return.')
+ c.argument('search_text', type=str, help='Text to search for in the datastore names.')
+ c.argument('order_by', type=str, help='Order by property (createdtime | modifiedtime | name).')
+ c.argument('order_by_asc', arg_type=get_three_state_flag(), help='Order by property in ascending order.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices datastore show') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices datastore create') as c:
+ c.argument('name', type=str, help='Datastore name.')
+ c.argument('skip_validation', arg_type=get_three_state_flag(), help='Flag to skip validation.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('contents', type=validate_file_or_dict, help='Reference to the datastore storage contents. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Whether this datastore is the default for the '
+ 'workspace.')
+ c.argument('linked_info', action=AddLinkedInfo, nargs='+', help='Information about the datastore origin, if '
+ 'linked.')
+ c.argument('properties', action=AddDatastoresProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices datastore update') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('skip_validation', arg_type=get_three_state_flag(), help='Flag to skip validation.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('contents', type=validate_file_or_dict, help='Reference to the datastore storage contents. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('is_default', arg_type=get_three_state_flag(), help='Whether this datastore is the default for the '
+ 'workspace.')
+ c.argument('linked_info', action=AddLinkedInfo, nargs='+', help='Information about the datastore origin, if '
+ 'linked.')
+ c.argument('properties', action=AddDatastoresProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices datastore delete') as c:
+ c.argument('name', type=str, help='Datastore name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # list-secret is a POST action, so no id_part settings here (not addressable via --ids).
+ with self.argument_context('machinelearningservices datastore list-secret') as c:
+ c.argument('name', type=str, help='Datastore name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ # Arguments for the environment-container CRUD commands.
+ with self.argument_context('machinelearningservices environment-container list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices environment-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices environment-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddEnvironmentcontainersProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices environment-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddEnvironmentcontainersProperties, nargs='+', help='The asset property '
+ 'dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices environment-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the environment-specification-version CRUD commands.
+ with self.argument_context('machinelearningservices environment-specification-version list') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices environment-specification-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices environment-specification-version create') as c:
+ c.argument('name', type=str, help='Name of EnvironmentSpecificationVersion.')
+ c.argument('version', type=str, help='Version of EnvironmentSpecificationVersion.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('conda_file', type=str, help='Standard configuration file used by Conda that lets you install any '
+ 'kind of package, including Python, R, and C/C++ packages. ')
+ c.argument('description', type=str, help='The asset description text.')
+ # Docker image vs. Docker build context are alternative environment sources (arg_group='Docker').
+ c.argument('docker_build', action=AddDockerBuild, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('docker_image', action=AddDockerImage, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('properties', action=AddEnvironmentspecificationversionsProperties, nargs='+', help='The asset '
+ 'property dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # NOTE(review): all three routes reuse the AddLivenessRoute action — presumably because they share the
+ # same Route schema; confirm against the generated _actions.py.
+ c.argument('liveness_route', action=AddLivenessRoute, nargs='+', help='The route to check the liveness of the '
+ 'inference server container.', arg_group='Inference Container Properties')
+ c.argument('readiness_route', action=AddLivenessRoute, nargs='+', help='The route to check the readiness of '
+ 'the inference server container.', arg_group='Inference Container Properties')
+ c.argument('scoring_route', action=AddLivenessRoute, nargs='+', help='The port to send the scoring requests '
+ 'to, within the inference server container.', arg_group='Inference Container Properties')
+
+ with self.argument_context('machinelearningservices environment-specification-version update') as c:
+ c.argument('name', type=str, help='Name of EnvironmentSpecificationVersion.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version of EnvironmentSpecificationVersion.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('conda_file', type=str, help='Standard configuration file used by Conda that lets you install any '
+ 'kind of package, including Python, R, and C/C++ packages. ')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('docker_build', action=AddDockerBuild, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('docker_image', action=AddDockerImage, nargs='+', help='Class to represent configuration settings '
+ 'for Docker Build', arg_group='Docker')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('properties', action=AddEnvironmentspecificationversionsProperties, nargs='+', help='The asset '
+ 'property dictionary. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('liveness_route', action=AddLivenessRoute, nargs='+', help='The route to check the liveness of the '
+ 'inference server container.', arg_group='Inference Container Properties')
+ c.argument('readiness_route', action=AddLivenessRoute, nargs='+', help='The route to check the readiness of '
+ 'the inference server container.', arg_group='Inference Container Properties')
+ c.argument('scoring_route', action=AddLivenessRoute, nargs='+', help='The port to send the scoring requests '
+ 'to, within the inference server container.', arg_group='Inference Container Properties')
+ # 'body' is assembled internally by the generated custom command; hide it from the CLI surface.
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices environment-specification-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the job commands. Jobs are addressed by a single --id (backing parameter 'id_',
+ # renamed via options_list because 'id' shadows a builtin in the generated code).
+ with self.argument_context('machinelearningservices job list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('job_type', type=str, help='Type of job to be returned.')
+ c.argument('tags', tags_type)
+ c.argument('tag', type=str, help='Jobs returned will have this tag key.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices job show') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job create') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('properties', type=validate_file_or_dict, help='Additional attributes of the entity. Expected '
+ 'value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices job update') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('properties', type=validate_file_or_dict, help='Additional attributes of the entity. Expected '
+ 'value: json-string/@json-file.')
+ # 'id' and 'body' are handled internally by the generated custom command; hide them from the CLI surface.
+ c.ignore('id', 'body')
+
+ with self.argument_context('machinelearningservices job delete') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job cancel') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices job wait') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the Job.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ # Arguments for the labeling-job commands.
+ with self.argument_context('machinelearningservices labeling-job list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Number of labeling jobs to return.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ # Arguments for `labeling-job show`. The two include_* flags map to the service's optional
+ # query parameters that expand JobInstructions / LabelCategories in the response.
+ with self.argument_context('machinelearningservices labeling-job show') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('include_job_instructions', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'whether to include JobInstructions in response.')
+ # Fixed help-text typo ('Whether' -> 'whether') to match the include_job_instructions wording above.
+ c.argument('include_label_categories', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'whether to include LabelCategories in response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job create') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('dataset_configuration', action=AddDatasetConfiguration, nargs='+', help='Configuration of dataset '
+ 'used in the job.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('job_type', arg_type=get_enum_type(['Command', 'Sweep', 'Labeling']), help='Specifies the type of '
+ 'job. This field should always be set to "Labeling".')
+ c.argument('label_categories', type=validate_file_or_dict, help='Label categories of the job. Expected value: '
+ 'json-string/@json-file.')
+ # Image and text media properties are alternatives within the same arg group.
+ c.argument('labeling_job_image_properties', action=AddLabelingJobImageProperties, nargs='+', help='Properties '
+ 'of a labeling job for image data', arg_group='LabelingJobMediaProperties')
+ c.argument('labeling_job_text_properties', action=AddLabelingJobTextProperties, nargs='+', help='Properties of '
+ 'a labeling job for text data', arg_group='LabelingJobMediaProperties')
+ c.argument('properties', action=AddLabelingjobsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ # ML-assisted labeling settings (arg_group='Ml Assist Configuration').
+ c.argument('inferencing_compute_binding', type=validate_file_or_dict, help='AML compute binding used in '
+ 'inferencing. Expected value: json-string/@json-file.', arg_group='Ml Assist Configuration')
+ c.argument('ml_assist_enabled', arg_type=get_three_state_flag(), help='Indicates whether MLAssist feature is '
+ 'enabled.', arg_group='Ml Assist Configuration')
+ c.argument('training_compute_binding', type=validate_file_or_dict, help='AML compute binding used in training. '
+ 'Expected value: json-string/@json-file.', arg_group='Ml Assist Configuration')
+ c.argument('uri', type=str, help='The link to a page with detailed labeling instructions for labelers.',
+ arg_group='Job Instructions')
+
+ # Same surface as create, plus id_part settings for --ids-style addressing.
+ with self.argument_context('machinelearningservices labeling-job update') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('dataset_configuration', action=AddDatasetConfiguration, nargs='+', help='Configuration of dataset '
+ 'used in the job.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('job_type', arg_type=get_enum_type(['Command', 'Sweep', 'Labeling']), help='Specifies the type of '
+ 'job. This field should always be set to "Labeling".')
+ c.argument('label_categories', type=validate_file_or_dict, help='Label categories of the job. Expected value: '
+ 'json-string/@json-file.')
+ c.argument('labeling_job_image_properties', action=AddLabelingJobImageProperties, nargs='+', help='Properties '
+ 'of a labeling job for image data', arg_group='LabelingJobMediaProperties')
+ c.argument('labeling_job_text_properties', action=AddLabelingJobTextProperties, nargs='+', help='Properties of '
+ 'a labeling job for text data', arg_group='LabelingJobMediaProperties')
+ c.argument('properties', action=AddLabelingjobsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.argument('inferencing_compute_binding', type=validate_file_or_dict, help='AML compute binding used in '
+ 'inferencing. Expected value: json-string/@json-file.', arg_group='Ml Assist Configuration')
+ c.argument('ml_assist_enabled', arg_type=get_three_state_flag(), help='Indicates whether MLAssist feature is '
+ 'enabled.', arg_group='Ml Assist Configuration')
+ c.argument('training_compute_binding', type=validate_file_or_dict, help='AML compute binding used in training. '
+ 'Expected value: json-string/@json-file.', arg_group='Ml Assist Configuration')
+ c.argument('uri', type=str, help='The link to a page with detailed labeling instructions for labelers.',
+ arg_group='Job Instructions')
+ # 'id' and 'body' are handled internally by the generated custom command; hide them from the CLI surface.
+ c.ignore('id', 'body')
+
+ with self.argument_context('machinelearningservices labeling-job delete') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job export-label') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('coco_export_summary', action=AddCocoExportSummary, nargs='+', help=' Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...', arg_group='Body')
+ c.argument('csv_export_summary', action=AddCsvExportSummary, nargs='+', help=' Expect value: KEY1=VALUE1 '
+ 'KEY2=VALUE2 ...', arg_group='Body')
+ c.argument('dataset_export_summary', action=AddDatasetExportSummary, nargs='+', help=' Expect value: '
+ 'KEY1=VALUE1 KEY2=VALUE2 ...', arg_group='Body')
+
+ with self.argument_context('machinelearningservices labeling-job pause') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job resume') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices labeling-job wait') as c:
+ c.argument('id_', options_list=['--id'], type=str, help='The name and identifier for the LabelingJob.',
+ id_part='child_name_1')
+ c.argument('include_job_instructions', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'whether to include JobInstructions in response.')
+ c.argument('include_label_categories', arg_type=get_three_state_flag(), help='Boolean value to indicate '
+ 'Whether to include LabelCategories in response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-container list') as c:
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('count', type=int, help='Maximum number of results to return.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices model-container show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-container create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddModelcontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices model-container update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('properties', action=AddModelcontainersProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices model-container delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-version list') as c:
+ c.argument('name', type=str, help='Model name.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Maximum number of records to return.')
+ c.argument('version', type=str, help='Model version.')
+ c.argument('description', type=str, help='Model description.')
+ c.argument('offset', type=int, help='Number of initial results to skip.')
+ c.argument('tags', tags_type)
+ c.argument('properties', type=str, help='Comma-separated list of property names (and optionally values). '
+ 'Example: prop1,prop2=value2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices model-version show') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices model-version create') as c:
+ c.argument('name', type=str, help='Container name.')
+ c.argument('version', type=str, help='Version identifier.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('flavors', type=validate_file_or_dict, help='Mapping of model flavors to their properties. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddModelversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('machinelearningservices model-version update') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('datastore_id', type=str, help='ARM resource ID of the datastore where the asset is located.')
+ c.argument('description', type=str, help='The asset description text.')
+ c.argument('flavors', type=validate_file_or_dict, help='Mapping of model flavors to their properties. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('is_anonymous', arg_type=get_three_state_flag(), help='If the name version are system generated '
+ '(anonymous registration).')
+ c.argument('path', type=str, help='The path of the file/directory in the datastore.')
+ c.argument('properties', action=AddModelversionsProperties, nargs='+', help='The asset property dictionary. '
+ 'Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('tags', tags_type)
+ c.ignore('body')
+
+ with self.argument_context('machinelearningservices model-version delete') as c:
+ c.argument('name', type=str, help='Container name.', id_part='child_name_1')
+ c.argument('version', type=str, help='Version identifier.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint list') as c:
+ c.argument('name', type=str, help='Name of the endpoint.')
+ c.argument('count', type=int, help='Number of endpoints to be retrieved in a page of results.')
+ c.argument('compute_type', arg_type=get_enum_type(['Managed', 'K8S', 'AzureMLCompute']),
+ help='EndpointComputeType to be filtered by.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('tags', tags_type)
+ c.argument('properties', type=str, help='A set of properties with which to filter the returned models. It is a '
+ 'comma separated string of properties key and/or properties key=value Example: '
+ 'propKey1,propKey2,propKey3=value3 .')
+ c.argument('order_by', arg_type=get_enum_type(['CreatedAtDesc', 'CreatedAtAsc', 'UpdatedAtDesc',
+ 'UpdatedAtAsc']), help='The option to order the response.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-endpoint show') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint create') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('auth_mode', arg_type=get_enum_type(['AMLToken', 'Key', 'AADToken']), help='Inference endpoint '
+ 'authentication mode type')
+ c.argument('description', type=str, help='Description of the inference endpoint.')
+ c.argument('keys', action=AddKeys, nargs='+', help='EndpointAuthKeys to set initially on an Endpoint. This '
+ 'property will always be returned as null. AuthKey values must be retrieved using the ListKeys API.')
+ c.argument('properties', action=AddProperties, nargs='+', help='Property dictionary. Properties can be added, '
+ 'but not removed or altered. Expect value: KEY1=VALUE1 KEY2=VALUE2 ...')
+ c.argument('target', type=str, help='ARM resource ID of the compute if it exists. optional')
+ c.argument('traffic', action=AddMachinelearningservicesOnlineEndpointCreateTraffic, nargs='+', help='Traffic '
+ 'rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 KEY2=VALUE2 '
+ '...')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-endpoint update') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('traffic', action=AddMachinelearningservicesOnlineEndpointUpdateTraffic, nargs='+', help='Traffic '
+ 'rules on how the traffic will be routed across deployments. Expect value: KEY1=VALUE1 KEY2=VALUE2 '
+ '...')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-endpoint delete') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint get-token') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-endpoint list-key') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-endpoint regenerate-key') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('key_type', arg_type=get_enum_type(['Primary', 'Secondary']), help='Specification for which type of '
+ 'key to generate. Primary or Secondary.')
+ c.argument('key_value', type=str, help='The value the key is set to.')
+
+ with self.argument_context('machinelearningservices online-endpoint wait') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-deployment list') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.')
+ c.argument('order_by', type=str, help='Ordering of list.')
+ c.argument('top', type=int, help='Top of list.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices online-deployment show') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-deployment create') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('tags', tags_type)
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('properties', type=validate_file_or_dict, help='Additional attributes of the entity. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-deployment update') as c:
+ c.argument('endpoint_name', type=str, help='Online Endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('kind', type=str, help='Metadata used by portal/tooling/etc to render different UX experiences for '
+ 'resources of the same type.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('properties', type=validate_file_or_dict, help='Additional attributes of the entity. Expected '
+ 'value: json-string/@json-file.')
+ c.argument('tags', tags_type)
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned', 'None']),
+ help='Defines values for a ResourceIdentity\'s type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='Dictionary of the user assigned '
+ 'identities, key is ARM resource ID of the UAI. Expected value: json-string/@json-file.',
+ arg_group='Identity')
+
+ with self.argument_context('machinelearningservices online-deployment delete') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices online-deployment get-log') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='The name and identifier for the endpoint.',
+ id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('container_type', arg_type=get_enum_type(['StorageInitializer', 'InferenceServer']), help='The type '
+ 'of container to retrieve logs from.')
+ c.argument('tail', type=int, help='The maximum number of lines to tail.')
+
+ with self.argument_context('machinelearningservices online-deployment wait') as c:
+ c.argument('endpoint_name', type=str, help='Inference endpoint name.', id_part='child_name_1')
+ c.argument('deployment_name', type=str, help='Inference Endpoint Deployment name.', id_part='child_name_2')
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace-feature list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
new file mode 100644
index 00000000000..41b43f2fd14
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
@@ -0,0 +1,1047 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
class AddSku(argparse.Action):
    """Parse space-separated KEY=VALUE tokens for ``--sku`` into an ARM Sku dict.

    Accepted keys (case-insensitive): ``name``, ``tier``.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.sku = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Convert raw KEY=VALUE tokens into the sku dictionary.

        Raises CLIError for a token without '=' or for an unsupported key.
        """
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            # 'from None' suppresses the internal ValueError chain; the usage
            # message is the whole story for the CLI user (bugbear B904).
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) from None
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'name':
                d['name'] = v[0]
            elif kl == 'tier':
                d['tier'] = v[0]
            else:
                raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, tier'.
                               format(k))
        return d
+
+
class AddSharedPrivateLinkResources(argparse._AppendAction):
    """Append one shared-private-link-resource dict per occurrence of the option."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Turn KEY=VALUE tokens into a single resource dictionary."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Map CLI-facing hyphenated keys onto SDK snake_case fields.
        key_map = {
            'name': 'name',
            'private-link-resource-id': 'private_link_resource_id',
            'group-id': 'group_id',
            'request-message': 'request_message',
            'status': 'status',
        }
        result = {}
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter shared_private_link_resources. All '
                               'possible keys are: name, private-link-resource-id, group-id, request-message, status'.
                               format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddIdentity(argparse.Action):
    """Parse ``--identity`` KEY=VALUE tokens onto ``namespace.identity``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.identity = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build the identity dictionary; only 'user-assigned-identity' is accepted."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for raw_key, entries in grouped.items():
            if raw_key.lower() == 'user-assigned-identity':
                result['user_assigned_identity'] = entries[0]
            else:
                raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: '
                               'user-assigned-identity'.format(raw_key))
        return result
+
+
class AddKeyVaultProperties(argparse.Action):
    """Parse ``--key-vault-properties`` KEY=VALUE tokens onto the namespace."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.key_vault_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build the key-vault-properties dictionary from raw tokens."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {
            'key-vault-arm-id': 'key_vault_arm_id',
            'key-identifier': 'key_identifier',
            'identity-client-id': 'identity_client_id',
        }
        result = {}
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys '
                               'are: key-vault-arm-id, key-identifier, identity-client-id'.format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddValue(argparse._AppendAction):
    """Append one quota-value dict per occurrence of the option."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddValue, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build a single value dictionary from KEY=VALUE tokens."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {'id': 'id', 'type': 'type', 'limit': 'limit', 'unit': 'unit'}
        result = {}
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter value. All possible keys are: id, type, '
                               'limit, unit'.format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddScaleSettings(argparse.Action):
    """Parse ``--scale-settings`` KEY=VALUE tokens onto ``namespace.scale_settings``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.scale_settings = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build the scale-settings dictionary; min_node_count defaults to 0."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Default present even when the user omits min-node-count.
        result = {'min_node_count': 0}
        key_map = {
            'max-node-count': 'max_node_count',
            'min-node-count': 'min_node_count',
            'node-idle-time-before-scale-down': 'node_idle_time_before_scale_down',
        }
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
                               'max-node-count, min-node-count, node-idle-time-before-scale-down'.format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddPrivateLinkServiceConnectionState(argparse.Action):
    """Parse connection-state KEY=VALUE tokens onto the namespace."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.private_link_service_connection_state = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build the connection-state dictionary from raw tokens."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {
            'status': 'status',
            'description': 'description',
            'actions-required': 'actions_required',
        }
        result = {}
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
                               'All possible keys are: status, description, actions-required'.format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddKeys(argparse.Action):
    """Parse endpoint auth-key KEY=VALUE tokens onto ``namespace.keys``."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.keys = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Build the keys dictionary; only primary-key and secondary-key are accepted."""
        grouped = defaultdict(list)
        try:
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        key_map = {'primary-key': 'primary_key', 'secondary-key': 'secondary_key'}
        result = {}
        for raw_key, entries in grouped.items():
            dest = key_map.get(raw_key.lower())
            if dest is None:
                raise CLIError('Unsupported Key {} is provided for parameter keys. All possible keys are: primary-key, '
                               'secondary-key'.format(raw_key))
            result[dest] = entries[0]
        return result
+
+
class AddBatchendpointsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the batch-endpoint ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddMachinelearningservicesBatchEndpointCreateTraffic(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the batch-endpoint create ``traffic`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddMachinelearningservicesBatchEndpointUpdateTraffic(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the batch-endpoint update ``traffic`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddCodeConfiguration(argparse.Action):
    """Parse KEY=VALUE tokens for the ``code_configuration`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.code_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'code-id': 'code_id', 'scoring-script': 'scoring_script'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter code_configuration. All possible keys '
                               'are: code-id, scoring-script'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddEnvironmentVariables(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the ``environment_variables`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.environment_variables = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddDataPathAssetReference(argparse.Action):
    """Parse KEY=VALUE tokens for the ``data_path_asset_reference`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.data_path_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the DataPath discriminator."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'datastore-id': 'datastore_id', 'path': 'path'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter data_path_asset_reference. All possible '
                               'keys are: datastore-id, path'.format(name))
            result[field] = supplied[0]
        # Discriminator telling the service which AssetReference variant this is.
        result['reference_type'] = 'DataPath'
        return result
+
+
class AddIdAssetReference(argparse.Action):
    """Parse KEY=VALUE tokens for the ``id_asset_reference`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.id_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the Id discriminator."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for name, supplied in grouped.items():
            if name.lower() != 'asset-id':
                raise CLIError('Unsupported Key {} is provided for parameter id_asset_reference. All possible keys '
                               'are: asset-id'.format(name))
            result['asset_id'] = supplied[0]
        # Discriminator telling the service which AssetReference variant this is.
        result['reference_type'] = 'Id'
        return result
+
+
class AddOutputPathAssetReference(argparse.Action):
    """Parse KEY=VALUE tokens for the ``output_path_asset_reference`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.output_path_asset_reference = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the OutputPath discriminator."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'job-id': 'job_id', 'path': 'path'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter output_path_asset_reference. All possible '
                               'keys are: job-id, path'.format(name))
            result[field] = supplied[0]
        # Discriminator telling the service which AssetReference variant this is.
        result['reference_type'] = 'OutputPath'
        return result
+
+
class AddOutputConfiguration(argparse.Action):
    """Parse KEY=VALUE tokens for the ``output_configuration`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.output_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'append-row-file-name': 'append_row_file_name', 'output-action': 'output_action'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter output_configuration. All possible keys '
                               'are: append-row-file-name, output-action'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddBatchdeploymentsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the batch-deployment ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddRetrySettings(argparse.Action):
    """Parse KEY=VALUE tokens for the ``retry_settings`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.retry_settings = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'max-retries': 'max_retries', 'timeout': 'timeout'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter retry_settings. All possible keys are: '
                               'max-retries, timeout'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddComputeConfigurationProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the ``compute_configuration_properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.compute_configuration_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddCodecontainersProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the code-container ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddCodeversionsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the code-version ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddDatacontainersProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the data-container ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddDataversionsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the data-version ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddLinkedInfo(argparse.Action):
    """Parse KEY=VALUE tokens for the ``linked_info`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.linked_info = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {
            'linked-id': 'linked_id',
            'linked-resource-name': 'linked_resource_name',
            'origin': 'origin',
        }
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter linked_info. All possible keys are: '
                               'linked-id, linked-resource-name, origin'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddDatastoresProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the datastore ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddEnvironmentcontainersProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the environment-container ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddDockerBuild(argparse.Action):
    """Parse KEY=VALUE tokens for the ``docker_build`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.docker_build = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the Build discriminator."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {
            'context': 'context',
            'dockerfile': 'dockerfile',
            'operating-system-type': 'operating_system_type',
        }
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter docker_build. All possible keys are: '
                               'context, dockerfile, operating-system-type'.format(name))
            result[field] = supplied[0]
        # Discriminator selecting the DockerSpecification variant.
        result['docker_specification_type'] = 'Build'
        return result
+
+
class AddDockerImage(argparse.Action):
    """Parse KEY=VALUE tokens for the ``docker_image`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.docker_image = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the Image discriminator."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {
            'docker-image-uri': 'docker_image_uri',
            'operating-system-type': 'operating_system_type',
        }
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter docker_image. All possible keys are: '
                               'docker-image-uri, operating-system-type'.format(name))
            result[field] = supplied[0]
        # Discriminator selecting the DockerSpecification variant.
        result['docker_specification_type'] = 'Image'
        return result
+
+
class AddEnvironmentspecificationversionsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the environment-specification-version ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddLivenessRoute(argparse.Action):
    """Parse KEY=VALUE tokens for the ``liveness_route`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.liveness_route = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {'path': 'path', 'port': 'port'}
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter liveness_route. All possible keys are: '
                               'path, port'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddDatasetConfiguration(argparse.Action):
    """Parse KEY=VALUE tokens for the ``dataset_configuration`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.dataset_configuration = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names, rejecting unknown keys."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        field_names = {
            'asset-name': 'asset_name',
            'dataset-version': 'dataset_version',
            'incremental-dataset-refresh-enabled': 'incremental_dataset_refresh_enabled',
        }
        result = {}
        for name, supplied in grouped.items():
            field = field_names.get(name.lower())
            if field is None:
                raise CLIError('Unsupported Key {} is provided for parameter dataset_configuration. All possible keys '
                               'are: asset-name, dataset-version, incremental-dataset-refresh-enabled'.format(name))
            result[field] = supplied[0]
        return result
+
+
class AddLabelingJobImageProperties(argparse.Action):
    """Parse KEY=VALUE tokens for the ``labeling_job_image_properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.labeling_job_image_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the Image media type."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for name, supplied in grouped.items():
            if name.lower() != 'annotation-type':
                raise CLIError('Unsupported Key {} is provided for parameter labeling_job_image_properties. All '
                               'possible keys are: annotation-type'.format(name))
            result['annotation_type'] = supplied[0]
        # Discriminator selecting the labeling-job media variant.
        result['media_type'] = 'Image'
        return result
+
+
class AddLabelingJobTextProperties(argparse.Action):
    """Parse KEY=VALUE tokens for the ``labeling_job_text_properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.labeling_job_text_properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Map CLI key names onto SDK field names and tag the Text media type."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for name, supplied in grouped.items():
            if name.lower() != 'annotation-type':
                raise CLIError('Unsupported Key {} is provided for parameter labeling_job_text_properties. All '
                               'possible keys are: annotation-type'.format(name))
            result['annotation_type'] = supplied[0]
        # Discriminator selecting the labeling-job media variant.
        result['media_type'] = 'Text'
        return result
+
+
class AddLabelingjobsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the labeling-job ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddCocoExportSummary(argparse.Action):
    """Build the Coco-format export-summary dict for the labeling-job export command."""

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.coco_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Validate KEY=VALUE syntax and return the Coco discriminator dict.

        The Coco export summary exposes no user-settable keys, so the tokens are
        parsed only to validate their syntax. The 'format' discriminator is now set
        unconditionally: previously it was assigned inside the per-key loop, so an
        empty values list returned {} with no discriminator at all.
        """
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        d['format'] = 'Coco'
        return d
+
+
class AddCsvExportSummary(argparse.Action):
    """Build the CSV-format export-summary dict for the labeling-job export command."""

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.csv_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Validate KEY=VALUE syntax and return the CSV discriminator dict.

        The CSV export summary exposes no user-settable keys, so the tokens are
        parsed only to validate their syntax. The 'format' discriminator is now set
        unconditionally: previously it was assigned inside the per-key loop, so an
        empty values list returned {} with no discriminator at all.
        """
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        d['format'] = 'CSV'
        return d
+
+
class AddDatasetExportSummary(argparse.Action):
    """Build the Dataset-format export-summary dict for the labeling-job export command."""

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.dataset_export_summary = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Validate KEY=VALUE syntax and return the Dataset discriminator dict.

        The Dataset export summary exposes no user-settable keys, so the tokens are
        parsed only to validate their syntax. The 'format' discriminator is now set
        unconditionally: previously it was assigned inside the per-key loop, so an
        empty values list returned {} with no discriminator at all.
        """
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        d['format'] = 'Dataset'
        return d
+
+
class AddModelcontainersProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the model-container ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddModelversionsProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the model-version ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddProperties(argparse.Action):
    """Parse free-form KEY=VALUE tokens for a generic ``properties`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.properties = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddMachinelearningservicesOnlineEndpointCreateTraffic(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the online-endpoint create ``traffic`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
+
+
class AddMachinelearningservicesOnlineEndpointUpdateTraffic(argparse.Action):
    """Parse free-form KEY=VALUE tokens for the online-endpoint update ``traffic`` argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.traffic = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        """Return a dict of each key to the first value supplied for it."""
        grouped = defaultdict(list)
        try:
            for token in values:
                name, value = token.split('=', 1)
                grouped[name].append(value)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        return {name: supplied[0] for name, supplied in grouped.items()}
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
new file mode 100644
index 00000000000..33b7256b596
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
@@ -0,0 +1,372 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
def load_command_table(self, _):
    """Register every `machinelearningservices` command group and command.

    AutoRest-generated wiring: each section below binds one operations class
    from the vendored SDK (via a client factory) to an `az` command group and
    points the individual commands at the wrapper functions defined in
    generated/custom.py.  Long-running operations expose --no-wait via
    supports_no_wait; destructive commands require confirmation.
    """

    from azext_machinelearningservices.generated._client_factory import cf_workspace
    machinelearningservices_workspace = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspaces_ope'
                        'rations#WorkspacesOperations.{}',
        client_factory=cf_workspace)
    with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
                            client_factory=cf_workspace) as g:
        g.custom_command('list', 'machinelearningservices_workspace_list')
        g.custom_show_command('show', 'machinelearningservices_workspace_show')
        g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
        g.custom_command('update', 'machinelearningservices_workspace_update')
        g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True,
                         confirmation=True)
        g.custom_command('list-key', 'machinelearningservices_workspace_list_key')
        g.custom_command('list-notebook-access-token', 'machinelearningservices_workspace_list_notebook_access_token')
        g.custom_command('list-notebook-key', 'machinelearningservices_workspace_list_notebook_key')
        g.custom_command('list-storage-account-key', 'machinelearningservices_workspace_list_storage_account_key')
        g.custom_command('prepare-notebook', 'machinelearningservices_workspace_prepare_notebook',
                         supports_no_wait=True)
        g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key', supports_no_wait=True)
        g.custom_wait_command('wait', 'machinelearningservices_workspace_show')

    from azext_machinelearningservices.generated._client_factory import cf_usage
    machinelearningservices_usage = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usages_operati'
                        'ons#UsagesOperations.{}',
        client_factory=cf_usage)
    with self.command_group('machinelearningservices usage', machinelearningservices_usage,
                            client_factory=cf_usage) as g:
        g.custom_command('list', 'machinelearningservices_usage_list')

    from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size
    machinelearningservices_virtual_machine_size = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin'
                        'e_sizes_operations#VirtualMachineSizesOperations.{}',
        client_factory=cf_virtual_machine_size)
    with self.command_group('machinelearningservices virtual-machine-size',
                            machinelearningservices_virtual_machine_size,
                            client_factory=cf_virtual_machine_size) as g:
        g.custom_command('list', 'machinelearningservices_virtual_machine_size_list')

    from azext_machinelearningservices.generated._client_factory import cf_quota
    machinelearningservices_quota = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quotas_operati'
                        'ons#QuotasOperations.{}',
        client_factory=cf_quota)
    with self.command_group('machinelearningservices quota', machinelearningservices_quota,
                            client_factory=cf_quota) as g:
        g.custom_command('list', 'machinelearningservices_quota_list')
        g.custom_command('update', 'machinelearningservices_quota_update')

    from azext_machinelearningservices.generated._client_factory import cf_compute
    machinelearningservices_compute = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._compute_operat'
                        'ions#ComputeOperations.{}',
        client_factory=cf_compute)
    with self.command_group('machinelearningservices compute', machinelearningservices_compute,
                            client_factory=cf_compute) as g:
        g.custom_command('list', 'machinelearningservices_compute_list')
        g.custom_show_command('show', 'machinelearningservices_compute_show')
        g.custom_command('create', 'machinelearningservices_compute_create', supports_no_wait=True)
        g.custom_command('update', 'machinelearningservices_compute_update', supports_no_wait=True)
        g.custom_command('delete', 'machinelearningservices_compute_delete', supports_no_wait=True, confirmation=True)
        g.custom_command('list-key', 'machinelearningservices_compute_list_key')
        g.custom_command('list-node', 'machinelearningservices_compute_list_node')
        g.custom_command('restart', 'machinelearningservices_compute_restart')
        g.custom_command('start', 'machinelearningservices_compute_start', supports_no_wait=True)
        g.custom_command('stop', 'machinelearningservices_compute_stop', supports_no_wait=True)
        g.custom_command('update-schedule', 'machinelearningservices_compute_update_schedule')
        g.custom_wait_command('wait', 'machinelearningservices_compute_show')

    from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection
    machinelearningservices_private_endpoint_connection = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi'
                        'nt_connections_operations#PrivateEndpointConnectionsOperations.{}',
        client_factory=cf_private_endpoint_connection)
    with self.command_group('machinelearningservices private-endpoint-connection',
                            machinelearningservices_private_endpoint_connection,
                            client_factory=cf_private_endpoint_connection) as g:
        g.custom_command('list', 'machinelearningservices_private_endpoint_connection_list')
        g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show')
        g.custom_command('create', 'machinelearningservices_private_endpoint_connection_create')
        g.generic_update_command('update', setter_arg_name='properties',
                                 custom_func_name='machinelearningservices_private_endpoint_connection_update')
        g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_private_link_resource
    machinelearningservices_private_link_resource = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r'
                        'esources_operations#PrivateLinkResourcesOperations.{}',
        client_factory=cf_private_link_resource)
    with self.command_group('machinelearningservices private-link-resource',
                            machinelearningservices_private_link_resource,
                            client_factory=cf_private_link_resource) as g:
        g.custom_command('list', 'machinelearningservices_private_link_resource_list')

    from azext_machinelearningservices.generated._client_factory import cf_workspace_connection
    machinelearningservices_workspace_connection = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn'
                        'ections_operations#WorkspaceConnectionsOperations.{}',
        client_factory=cf_workspace_connection)
    with self.command_group('machinelearningservices workspace-connection',
                            machinelearningservices_workspace_connection,
                            client_factory=cf_workspace_connection) as g:
        g.custom_command('list', 'machinelearningservices_workspace_connection_list')
        g.custom_show_command('show', 'machinelearningservices_workspace_connection_show')
        g.custom_command('create', 'machinelearningservices_workspace_connection_create')
        g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_batch_endpoint
    machinelearningservices_batch_endpoint = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._batch_endpoint'
                        's_operations#BatchEndpointsOperations.{}',
        client_factory=cf_batch_endpoint)
    with self.command_group('machinelearningservices batch-endpoint', machinelearningservices_batch_endpoint,
                            client_factory=cf_batch_endpoint) as g:
        g.custom_command('list', 'machinelearningservices_batch_endpoint_list')
        g.custom_show_command('show', 'machinelearningservices_batch_endpoint_show')
        g.custom_command('create', 'machinelearningservices_batch_endpoint_create')
        g.custom_command('update', 'machinelearningservices_batch_endpoint_update')
        g.custom_command('delete', 'machinelearningservices_batch_endpoint_delete', confirmation=True)
        g.custom_command('list-key', 'machinelearningservices_batch_endpoint_list_key')

    from azext_machinelearningservices.generated._client_factory import cf_batch_deployment
    machinelearningservices_batch_deployment = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._batch_deployme'
                        'nts_operations#BatchDeploymentsOperations.{}',
        client_factory=cf_batch_deployment)
    with self.command_group('machinelearningservices batch-deployment', machinelearningservices_batch_deployment,
                            client_factory=cf_batch_deployment) as g:
        g.custom_command('list', 'machinelearningservices_batch_deployment_list')
        g.custom_show_command('show', 'machinelearningservices_batch_deployment_show')
        g.custom_command('create', 'machinelearningservices_batch_deployment_create')
        g.custom_command('update', 'machinelearningservices_batch_deployment_update')
        g.custom_command('delete', 'machinelearningservices_batch_deployment_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_code_container
    machinelearningservices_code_container = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._code_container'
                        's_operations#CodeContainersOperations.{}',
        client_factory=cf_code_container)
    with self.command_group('machinelearningservices code-container', machinelearningservices_code_container,
                            client_factory=cf_code_container) as g:
        g.custom_command('list', 'machinelearningservices_code_container_list')
        g.custom_show_command('show', 'machinelearningservices_code_container_show')
        g.custom_command('create', 'machinelearningservices_code_container_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_code_conta'
                                 'iner_update')
        g.custom_command('delete', 'machinelearningservices_code_container_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_code_version
    machinelearningservices_code_version = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._code_versions_'
                        'operations#CodeVersionsOperations.{}',
        client_factory=cf_code_version)
    with self.command_group('machinelearningservices code-version', machinelearningservices_code_version,
                            client_factory=cf_code_version) as g:
        g.custom_command('list', 'machinelearningservices_code_version_list')
        g.custom_show_command('show', 'machinelearningservices_code_version_show')
        g.custom_command('create', 'machinelearningservices_code_version_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_code_versi'
                                 'on_update')
        g.custom_command('delete', 'machinelearningservices_code_version_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_data_container
    machinelearningservices_data_container = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._data_container'
                        's_operations#DataContainersOperations.{}',
        client_factory=cf_data_container)
    with self.command_group('machinelearningservices data-container', machinelearningservices_data_container,
                            client_factory=cf_data_container) as g:
        g.custom_command('list', 'machinelearningservices_data_container_list')
        g.custom_show_command('show', 'machinelearningservices_data_container_show')
        g.custom_command('create', 'machinelearningservices_data_container_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_data_conta'
                                 'iner_update')
        g.custom_command('delete', 'machinelearningservices_data_container_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_data_version
    machinelearningservices_data_version = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._data_versions_'
                        'operations#DataVersionsOperations.{}',
        client_factory=cf_data_version)
    with self.command_group('machinelearningservices data-version', machinelearningservices_data_version,
                            client_factory=cf_data_version) as g:
        g.custom_command('list', 'machinelearningservices_data_version_list')
        g.custom_show_command('show', 'machinelearningservices_data_version_show')
        g.custom_command('create', 'machinelearningservices_data_version_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_data_versi'
                                 'on_update')
        g.custom_command('delete', 'machinelearningservices_data_version_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_datastore
    machinelearningservices_datastore = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._datastores_ope'
                        'rations#DatastoresOperations.{}',
        client_factory=cf_datastore)
    with self.command_group('machinelearningservices datastore', machinelearningservices_datastore,
                            client_factory=cf_datastore) as g:
        g.custom_command('list', 'machinelearningservices_datastore_list')
        g.custom_show_command('show', 'machinelearningservices_datastore_show')
        g.custom_command('create', 'machinelearningservices_datastore_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_datastore_'
                                 'update')
        g.custom_command('delete', 'machinelearningservices_datastore_delete', confirmation=True)
        g.custom_command('list-secret', 'machinelearningservices_datastore_list_secret')

    from azext_machinelearningservices.generated._client_factory import cf_environment_container
    machinelearningservices_environment_container = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._environment_co'
                        'ntainers_operations#EnvironmentContainersOperations.{}',
        client_factory=cf_environment_container)
    with self.command_group('machinelearningservices environment-container',
                            machinelearningservices_environment_container,
                            client_factory=cf_environment_container) as g:
        g.custom_command('list', 'machinelearningservices_environment_container_list')
        g.custom_show_command('show', 'machinelearningservices_environment_container_show')
        g.custom_command('create', 'machinelearningservices_environment_container_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_environmen'
                                 't_container_update')
        g.custom_command('delete', 'machinelearningservices_environment_container_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_environment_specification_version
    machinelearningservices_environment_specification_version = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._environment_sp'
                        'ecification_versions_operations#EnvironmentSpecificationVersionsOperations.{}',
        client_factory=cf_environment_specification_version)
    with self.command_group('machinelearningservices environment-specification-version',
                            machinelearningservices_environment_specification_version,
                            client_factory=cf_environment_specification_version) as g:
        g.custom_command('list', 'machinelearningservices_environment_specification_version_list')
        g.custom_show_command('show', 'machinelearningservices_environment_specification_version_show')
        g.custom_command('create', 'machinelearningservices_environment_specification_version_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_environmen'
                                 't_specification_version_update')
        g.custom_command('delete', 'machinelearningservices_environment_specification_version_delete',
                         confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_job
    machinelearningservices_job = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._jobs_operation'
                        's#JobsOperations.{}',
        client_factory=cf_job)
    with self.command_group('machinelearningservices job', machinelearningservices_job, client_factory=cf_job) as g:
        g.custom_command('list', 'machinelearningservices_job_list')
        g.custom_show_command('show', 'machinelearningservices_job_show')
        g.custom_command('create', 'machinelearningservices_job_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_job_update'
                                 '')
        g.custom_command('delete', 'machinelearningservices_job_delete', supports_no_wait=True, confirmation=True)
        g.custom_command('cancel', 'machinelearningservices_job_cancel')
        g.custom_wait_command('wait', 'machinelearningservices_job_show')

    from azext_machinelearningservices.generated._client_factory import cf_labeling_job
    machinelearningservices_labeling_job = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._labeling_jobs_'
                        'operations#LabelingJobsOperations.{}',
        client_factory=cf_labeling_job)
    with self.command_group('machinelearningservices labeling-job', machinelearningservices_labeling_job,
                            client_factory=cf_labeling_job) as g:
        g.custom_command('list', 'machinelearningservices_labeling_job_list')
        g.custom_show_command('show', 'machinelearningservices_labeling_job_show')
        g.custom_command('create', 'machinelearningservices_labeling_job_create', supports_no_wait=True)
        g.generic_update_command('update', setter_arg_name='body', setter_name='begin_create_or_update',
                                 custom_func_name='machinelearningservices_labeling_job_update',
                                 supports_no_wait=True)
        g.custom_command('delete', 'machinelearningservices_labeling_job_delete', confirmation=True)
        g.custom_command('export-label', 'machinelearningservices_labeling_job_export_label', supports_no_wait=True)
        g.custom_command('pause', 'machinelearningservices_labeling_job_pause')
        g.custom_command('resume', 'machinelearningservices_labeling_job_resume', supports_no_wait=True)
        g.custom_wait_command('wait', 'machinelearningservices_labeling_job_show')

    from azext_machinelearningservices.generated._client_factory import cf_model_container
    machinelearningservices_model_container = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_containe'
                        'rs_operations#ModelContainersOperations.{}',
        client_factory=cf_model_container)
    with self.command_group('machinelearningservices model-container', machinelearningservices_model_container,
                            client_factory=cf_model_container) as g:
        g.custom_command('list', 'machinelearningservices_model_container_list')
        g.custom_show_command('show', 'machinelearningservices_model_container_show')
        g.custom_command('create', 'machinelearningservices_model_container_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_model_cont'
                                 'ainer_update')
        g.custom_command('delete', 'machinelearningservices_model_container_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_model_version
    machinelearningservices_model_version = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_versions'
                        '_operations#ModelVersionsOperations.{}',
        client_factory=cf_model_version)
    with self.command_group('machinelearningservices model-version', machinelearningservices_model_version,
                            client_factory=cf_model_version) as g:
        g.custom_command('list', 'machinelearningservices_model_version_list')
        g.custom_show_command('show', 'machinelearningservices_model_version_show')
        g.custom_command('create', 'machinelearningservices_model_version_create')
        g.generic_update_command('update', setter_arg_name='body', custom_func_name='machinelearningservices_model_vers'
                                 'ion_update')
        g.custom_command('delete', 'machinelearningservices_model_version_delete', confirmation=True)

    from azext_machinelearningservices.generated._client_factory import cf_online_endpoint
    machinelearningservices_online_endpoint = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._online_endpoin'
                        'ts_operations#OnlineEndpointsOperations.{}',
        client_factory=cf_online_endpoint)
    with self.command_group('machinelearningservices online-endpoint', machinelearningservices_online_endpoint,
                            client_factory=cf_online_endpoint) as g:
        g.custom_command('list', 'machinelearningservices_online_endpoint_list')
        g.custom_show_command('show', 'machinelearningservices_online_endpoint_show')
        g.custom_command('create', 'machinelearningservices_online_endpoint_create', supports_no_wait=True)
        g.custom_command('update', 'machinelearningservices_online_endpoint_update', supports_no_wait=True)
        g.custom_command('delete', 'machinelearningservices_online_endpoint_delete', supports_no_wait=True,
                         confirmation=True)
        g.custom_command('get-token', 'machinelearningservices_online_endpoint_get_token')
        g.custom_command('list-key', 'machinelearningservices_online_endpoint_list_key')
        g.custom_command('regenerate-key', 'machinelearningservices_online_endpoint_regenerate_key',
                         supports_no_wait=True)
        g.custom_wait_command('wait', 'machinelearningservices_online_endpoint_show')

    from azext_machinelearningservices.generated._client_factory import cf_online_deployment
    machinelearningservices_online_deployment = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._online_deploym'
                        'ents_operations#OnlineDeploymentsOperations.{}',
        client_factory=cf_online_deployment)
    with self.command_group('machinelearningservices online-deployment', machinelearningservices_online_deployment,
                            client_factory=cf_online_deployment) as g:
        g.custom_command('list', 'machinelearningservices_online_deployment_list')
        g.custom_show_command('show', 'machinelearningservices_online_deployment_show')
        g.custom_command('create', 'machinelearningservices_online_deployment_create', supports_no_wait=True)
        g.custom_command('update', 'machinelearningservices_online_deployment_update', supports_no_wait=True)
        g.custom_command('delete', 'machinelearningservices_online_deployment_delete', supports_no_wait=True,
                         confirmation=True)
        g.custom_command('get-log', 'machinelearningservices_online_deployment_get_log')
        g.custom_wait_command('wait', 'machinelearningservices_online_deployment_show')

    from azext_machinelearningservices.generated._client_factory import cf_workspace_feature
    machinelearningservices_workspace_feature = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat'
                        'ures_operations#WorkspaceFeaturesOperations.{}',
        client_factory=cf_workspace_feature)
    with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature,
                            client_factory=cf_workspace_feature) as g:
        g.custom_command('list', 'machinelearningservices_workspace_feature_list')

    from azext_machinelearningservices.generated._client_factory import cf_workspace_sku
    machinelearningservices_workspace_sku = CliCommandType(
        operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_skus'
                        '_operations#WorkspaceSkusOperations.{}',
        client_factory=cf_workspace_sku)
    with self.command_group('machinelearningservices workspace-sku', machinelearningservices_workspace_sku,
                            client_factory=cf_workspace_sku) as g:
        g.custom_command('list', 'machinelearningservices_workspace_sku_list')

    # Mark the whole extension surface as experimental.
    with self.command_group('machinelearningservices', is_experimental=True):
        pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
new file mode 100644
index 00000000000..53e512a26c0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
@@ -0,0 +1,2001 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+# pylint: disable=too-many-lines
+# pylint: disable=unused-argument
+
+from knack.util import CLIError
+from azure.cli.core.util import sdk_no_wait
+
+
def machinelearningservices_workspace_list(client,
                                           resource_group_name=None,
                                           skip=None):
    """List workspaces, scoped to a resource group when one is supplied.

    Without a resource group the whole subscription is enumerated.
    """
    if not resource_group_name:
        return client.list_by_subscription(skip=skip)
    return client.list_by_resource_group(resource_group_name=resource_group_name,
                                         skip=skip)
+
+
def machinelearningservices_workspace_show(client,
                                           resource_group_name,
                                           workspace_name):
    """Return the details of a single workspace."""
    lookup = {'resource_group_name': resource_group_name,
              'workspace_name': workspace_name}
    return client.get(**lookup)
+
+
def machinelearningservices_workspace_create(client,
                                             resource_group_name,
                                             workspace_name,
                                             location=None,
                                             tags=None,
                                             sku=None,
                                             description=None,
                                             friendly_name=None,
                                             key_vault=None,
                                             application_insights=None,
                                             container_registry=None,
                                             storage_account=None,
                                             discovery_url=None,
                                             hbi_workspace=None,
                                             image_build_compute=None,
                                             allow_public_access_when_behind_vnet=None,
                                             shared_private_link_resources=None,
                                             primary_user_assigned_identity=None,
                                             collections_throughput=None,
                                             status=None,
                                             identity=None,
                                             key_vault_properties=None,
                                             no_wait=False,
                                             type_=None,
                                             user_assigned_identities=None):
    """Create or update an Azure ML workspace (long-running operation).

    Builds the request body expected by
    WorkspacesOperations.begin_create_or_update and starts the LRO;
    ``no_wait=True`` returns the poller immediately.

    :param hbi_workspace: high-business-impact flag; the service is sent
        ``False`` when the caller omits it.
    :param allow_public_access_when_behind_vnet: likewise defaults to ``False``.
    :param status: encryption status, nested under ``encryption``.
    :param identity: encryption identity, nested under ``encryption`` (distinct
        from the top-level managed ``identity`` built from ``type_`` /
        ``user_assigned_identities``).
    """
    # Original generated code normalized the two booleans to False up front
    # AND re-checked them for None inline; the second check was dead code.
    # A single explicit default keeps the wire behavior identical.
    parameters = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'description': description,
        'friendly_name': friendly_name,
        'key_vault': key_vault,
        'application_insights': application_insights,
        'container_registry': container_registry,
        'storage_account': storage_account,
        'discovery_url': discovery_url,
        'hbi_workspace': False if hbi_workspace is None else hbi_workspace,
        'image_build_compute': image_build_compute,
        'allow_public_access_when_behind_vnet': (
            False if allow_public_access_when_behind_vnet is None
            else allow_public_access_when_behind_vnet),
        'shared_private_link_resources': shared_private_link_resources,
        'primary_user_assigned_identity': primary_user_assigned_identity,
        'cosmos_db': {'collections_throughput': collections_throughput},
        'encryption': {
            'status': status,
            'identity': identity,
            'key_vault_properties': key_vault_properties,
        },
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       parameters=parameters)
+
+
def machinelearningservices_workspace_update(client,
                                             resource_group_name,
                                             workspace_name,
                                             tags=None,
                                             sku=None,
                                             description=None,
                                             friendly_name=None,
                                             image_build_compute=None,
                                             primary_user_assigned_identity=None,
                                             collections_throughput=None,
                                             type_=None,
                                             user_assigned_identities=None):
    """Patch the mutable properties of an existing workspace."""
    parameters = {
        'tags': tags,
        'sku': sku,
        'description': description,
        'friendly_name': friendly_name,
        'image_build_compute': image_build_compute,
        'primary_user_assigned_identity': primary_user_assigned_identity,
        'cosmos_db': {'collections_throughput': collections_throughput},
        'identity': {'type': type_,
                     'user_assigned_identities': user_assigned_identities},
    }
    return client.update(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         parameters=parameters)
+
+
def machinelearningservices_workspace_delete(client,
                                             resource_group_name,
                                             workspace_name,
                                             no_wait=False):
    """Delete a workspace; --no-wait skips polling the LRO."""
    return sdk_no_wait(
        no_wait,
        client.begin_delete,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
+
+
def machinelearningservices_workspace_list_key(client,
                                               resource_group_name,
                                               workspace_name):
    """Return the access keys associated with a workspace."""
    scope = {'resource_group_name': resource_group_name,
             'workspace_name': workspace_name}
    return client.list_keys(**scope)
+
+
def machinelearningservices_workspace_list_notebook_access_token(client,
                                                                 resource_group_name,
                                                                 workspace_name):
    """Return a notebook access token for the workspace."""
    scope = {'resource_group_name': resource_group_name,
             'workspace_name': workspace_name}
    return client.list_notebook_access_token(**scope)
+
+
def machinelearningservices_workspace_list_notebook_key(client,
                                                        resource_group_name,
                                                        workspace_name):
    """Return the notebook keys of the workspace."""
    scope = {'resource_group_name': resource_group_name,
             'workspace_name': workspace_name}
    return client.list_notebook_keys(**scope)
+
+
def machinelearningservices_workspace_list_storage_account_key(client,
                                                               resource_group_name,
                                                               workspace_name):
    """Return the storage-account keys of the workspace."""
    scope = {'resource_group_name': resource_group_name,
             'workspace_name': workspace_name}
    return client.list_storage_account_keys(**scope)
+
+
def machinelearningservices_workspace_prepare_notebook(client,
                                                       resource_group_name,
                                                       workspace_name,
                                                       no_wait=False):
    """Start notebook-resource preparation for the workspace (LRO)."""
    return sdk_no_wait(
        no_wait,
        client.begin_prepare_notebook,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
+
+
def machinelearningservices_workspace_resync_key(client,
                                                 resource_group_name,
                                                 workspace_name,
                                                 no_wait=False):
    """Resynchronize workspace keys with dependent resources (LRO)."""
    return sdk_no_wait(
        no_wait,
        client.begin_resync_keys,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
+
+
def machinelearningservices_usage_list(client, location):
    """List current resource usages for the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_virtual_machine_size_list(client, location):
    """List VM sizes available in the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_quota_list(client, location):
    """List workspace quotas for the given Azure location."""
    return client.list(location=location)
+
+
def machinelearningservices_quota_update(client,
                                         location,
                                         value=None,
                                         quota_update_parameters_location=None):
    """Update workspace quotas in *location*.

    Note the request body carries its own 'location' field, distinct from the
    URL-level *location* argument.
    """
    parameters = {'value': value,
                  'location': quota_update_parameters_location}
    return client.update(location=location, parameters=parameters)
+
+
def machinelearningservices_compute_list(client,
                                         resource_group_name,
                                         workspace_name,
                                         skip=None):
    """Enumerate the compute resources attached to a workspace."""
    query = {'resource_group_name': resource_group_name,
             'workspace_name': workspace_name,
             'skip': skip}
    return client.list(**query)
+
+
def machinelearningservices_compute_show(client, resource_group_name, workspace_name, compute_name):
    """Get a single compute target by name."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name, compute_name=compute_name)
+
+
def machinelearningservices_compute_create(client, resource_group_name, workspace_name, compute_name,
                                           properties=None, location=None, tags=None, sku=None,
                                           type_=None, user_assigned_identities=None, no_wait=False):
    """Create or update a compute target (long-running operation).

    Identity settings (`type_`, `user_assigned_identities`) are folded into
    the request body's `identity` sub-object.
    """
    payload = {
        'properties': properties,
        'location': location,
        'tags': tags,
        'sku': sku,
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=payload)
+
+
def machinelearningservices_compute_update(client, resource_group_name, workspace_name, compute_name,
                                           scale_settings=None, no_wait=False):
    """Update a compute target's scale settings (long-running operation)."""
    payload = {'scale_settings': scale_settings}
    return sdk_no_wait(no_wait, client.begin_update,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       parameters=payload)
+
+
def machinelearningservices_compute_delete(client, resource_group_name, workspace_name, compute_name,
                                           underlying_resource_action, no_wait=False):
    """Delete a compute target; `underlying_resource_action` picks Detach vs Delete."""
    return sdk_no_wait(no_wait, client.begin_delete,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name,
                       underlying_resource_action=underlying_resource_action)
+
+
def machinelearningservices_compute_list_key(client, resource_group_name, workspace_name, compute_name):
    """List the secrets/keys of a compute target."""
    return client.list_keys(resource_group_name=resource_group_name,
                            workspace_name=workspace_name, compute_name=compute_name)
+
+
def machinelearningservices_compute_list_node(client, resource_group_name, workspace_name, compute_name):
    """List the nodes of a compute target."""
    return client.list_nodes(resource_group_name=resource_group_name,
                             workspace_name=workspace_name, compute_name=compute_name)
+
+
def machinelearningservices_compute_restart(client, resource_group_name, workspace_name, compute_name):
    """Restart a compute instance."""
    return client.restart(resource_group_name=resource_group_name,
                          workspace_name=workspace_name, compute_name=compute_name)
+
+
def machinelearningservices_compute_start(client, resource_group_name, workspace_name, compute_name,
                                          no_wait=False):
    """Start a compute instance (long-running operation)."""
    return sdk_no_wait(no_wait, client.begin_start,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name)
+
+
def machinelearningservices_compute_stop(client, resource_group_name, workspace_name, compute_name,
                                         no_wait=False):
    """Stop a compute instance (long-running operation)."""
    return sdk_no_wait(no_wait, client.begin_stop,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       compute_name=compute_name)
+
+
def machinelearningservices_compute_update_schedule(client, resource_group_name, workspace_name,
                                                    compute_name, compute_start_stop=None):
    """Update the start/stop schedules of a compute instance."""
    payload = {'compute_start_stop': compute_start_stop}
    return client.update_schedules(resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   compute_name=compute_name,
                                   parameters=payload)
+
+
def machinelearningservices_private_endpoint_connection_list(client, resource_group_name, workspace_name):
    """List private endpoint connections of a workspace."""
    return client.list(resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_private_endpoint_connection_show(client, resource_group_name, workspace_name,
                                                             private_endpoint_connection_name):
    """Get a private endpoint connection by name."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      private_endpoint_connection_name=private_endpoint_connection_name)
+
+
def machinelearningservices_private_endpoint_connection_create(client, resource_group_name, workspace_name,
                                                               private_endpoint_connection_name,
                                                               location=None, tags=None, sku=None,
                                                               private_link_service_connection_state=None,
                                                               type_=None, user_assigned_identities=None):
    """Create or update a private endpoint connection on a workspace.

    Identity settings are folded into the `identity` sub-object of the payload.
    """
    payload = {
        'location': location,
        'tags': tags,
        'sku': sku,
        'private_link_service_connection_state': private_link_service_connection_state,
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return client.create_or_update(resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   private_endpoint_connection_name=private_endpoint_connection_name,
                                   properties=payload)
+
+
def machinelearningservices_private_endpoint_connection_update(instance,
                                                               resource_group_name,
                                                               workspace_name,
                                                               private_endpoint_connection_name,
                                                               location=None, tags=None, sku=None,
                                                               private_link_service_connection_state=None,
                                                               type_=None, user_assigned_identities=None):
    """Generic-update helper: copy any supplied values onto the fetched instance."""
    top_level = (
        ('location', location),
        ('tags', tags),
        ('sku', sku),
        ('private_link_service_connection_state', private_link_service_connection_state),
    )
    for attr, val in top_level:
        if val is not None:
            setattr(instance, attr, val)
    # Identity fields live on the nested identity object.
    if type_ is not None:
        instance.identity.type = type_
    if user_assigned_identities is not None:
        instance.identity.user_assigned_identities = user_assigned_identities
    return instance
+
+
def machinelearningservices_private_endpoint_connection_delete(client, resource_group_name, workspace_name,
                                                               private_endpoint_connection_name):
    """Delete a private endpoint connection."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         private_endpoint_connection_name=private_endpoint_connection_name)
+
+
def machinelearningservices_private_link_resource_list(client, resource_group_name, workspace_name):
    """List private link resources of a workspace."""
    return client.list(resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_workspace_connection_list(client, resource_group_name, workspace_name,
                                                      target=None, category=None):
    """List workspace connections, optionally filtered by target/category."""
    return client.list(resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       target=target, category=category)
+
+
def machinelearningservices_workspace_connection_show(client, resource_group_name, workspace_name,
                                                      connection_name):
    """Get a workspace connection by name."""
    return client.get(resource_group_name=resource_group_name,
                      workspace_name=workspace_name,
                      connection_name=connection_name)
+
+
def machinelearningservices_workspace_connection_create(client, resource_group_name, workspace_name,
                                                        connection_name, category=None, target=None,
                                                        auth_type=None, value=None):
    """Create a workspace connection.

    The value format is always reported as JSON by this CLI layer.
    """
    payload = {
        'category': category,
        'target': target,
        'auth_type': auth_type,
        'value': value,
        'value_format': "JSON",
    }
    return client.create(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         connection_name=connection_name,
                         parameters=payload)
+
+
def machinelearningservices_workspace_connection_delete(client, resource_group_name, workspace_name,
                                                        connection_name):
    """Delete a workspace connection."""
    return client.delete(resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         connection_name=connection_name)
+
+
def machinelearningservices_batch_endpoint_list(client, resource_group_name, workspace_name,
                                                count=None, skip=None):
    """List batch inference endpoints in a workspace (paged via count/skip)."""
    return client.list(count=count, skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_batch_endpoint_show(client, endpoint_name, resource_group_name, workspace_name):
    """Get a batch inference endpoint by name."""
    return client.get(endpoint_name=endpoint_name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_batch_endpoint_create(client, endpoint_name, resource_group_name,
                                                  workspace_name, location, tags=None, kind=None,
                                                  auth_mode=None, description=None, keys=None,
                                                  properties=None, traffic=None, type_=None,
                                                  user_assigned_identities=None):
    """Create or update a batch inference endpoint.

    Flat CLI arguments are assembled into the nested `properties` and
    `identity` sub-objects expected by the service.
    """
    payload = {
        'tags': tags,
        'location': location,
        'kind': kind,
        'properties': {
            'auth_mode': auth_mode,
            'description': description,
            'keys': keys,
            'properties': properties,
            'traffic': traffic,
        },
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return client.create_or_update(endpoint_name=endpoint_name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_batch_endpoint_update(client, endpoint_name, resource_group_name,
                                                  workspace_name, kind=None, location=None, tags=None,
                                                  traffic=None, type_=None, user_assigned_identities=None):
    """Update a batch inference endpoint (traffic/identity/tags)."""
    payload = {
        'kind': kind,
        'location': location,
        'tags': tags,
        'properties': {'traffic': traffic},
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return client.update(endpoint_name=endpoint_name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         body=payload)
+
+
def machinelearningservices_batch_endpoint_delete(client, endpoint_name, resource_group_name, workspace_name):
    """Delete a batch inference endpoint."""
    return client.delete(endpoint_name=endpoint_name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_batch_endpoint_list_key(client, endpoint_name, resource_group_name, workspace_name):
    """List the auth keys of a batch inference endpoint."""
    return client.list_keys(endpoint_name=endpoint_name,
                            resource_group_name=resource_group_name,
                            workspace_name=workspace_name)
+
+
def machinelearningservices_batch_deployment_list(client, endpoint_name, resource_group_name,
                                                  workspace_name, order_by=None, top=None, skip=None):
    """List deployments under a batch endpoint (paged/sorted as requested)."""
    return client.list(endpoint_name=endpoint_name,
                       order_by=order_by, top=top, skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_batch_deployment_show(client, endpoint_name, deployment_name,
                                                  resource_group_name, workspace_name):
    """Get a batch deployment by name."""
    return client.get(endpoint_name=endpoint_name,
                      deployment_name=deployment_name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_batch_deployment_create(client, endpoint_name, deployment_name,
                                                    resource_group_name, workspace_name, location,
                                                    tags=None, kind=None, code_configuration=None,
                                                    description=None, environment_id=None,
                                                    environment_variables=None, error_threshold=None,
                                                    logging_level=None, mini_batch_size=None,
                                                    data_path_asset_reference=None,
                                                    id_asset_reference=None,
                                                    output_path_asset_reference=None,
                                                    output_configuration=None, partition_keys=None,
                                                    properties=None, retry_settings=None,
                                                    instance_count=None, instance_type=None,
                                                    is_local=None, compute_configuration_location=None,
                                                    compute_configuration_properties=None, target=None,
                                                    type_=None, user_assigned_identities=None):
    """Create or update a batch deployment.

    Exactly one of the three mutually-exclusive model asset references may be
    supplied; the flat CLI arguments are assembled into the nested request body.
    """
    model_candidates = [ref for ref in (data_path_asset_reference,
                                        id_asset_reference,
                                        output_path_asset_reference)
                        if ref is not None]
    if len(model_candidates) > 1:
        raise CLIError('at most one of data_path_asset_reference, id_asset_reference, output_path_asset_reference is '
                       'needed for model!')
    model = model_candidates[0] if model_candidates else None
    payload = {
        'tags': tags,
        'location': location,
        'kind': kind,
        'properties': {
            'code_configuration': code_configuration,
            'description': description,
            'environment_id': environment_id,
            'environment_variables': environment_variables,
            'error_threshold': error_threshold,
            'logging_level': logging_level,
            'mini_batch_size': mini_batch_size,
            'model': model,
            'output_configuration': output_configuration,
            'partition_keys': partition_keys,
            'properties': properties,
            'retry_settings': retry_settings,
            'compute': {
                'instance_count': instance_count,
                'instance_type': instance_type,
                'is_local': is_local,
                'location': compute_configuration_location,
                'properties': {
                    'properties': compute_configuration_properties,
                    'compute': {'target': target},
                },
            },
        },
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return client.create_or_update(endpoint_name=endpoint_name,
                                   deployment_name=deployment_name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_batch_deployment_update(client, endpoint_name, deployment_name,
                                                    resource_group_name, workspace_name, kind=None,
                                                    location=None, tags=None, description=None,
                                                    type_=None, user_assigned_identities=None):
    """Update a batch deployment (description/identity/tags)."""
    payload = {
        'kind': kind,
        'location': location,
        'tags': tags,
        'properties': {'description': description},
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return client.update(endpoint_name=endpoint_name,
                         deployment_name=deployment_name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name,
                         body=payload)
+
+
def machinelearningservices_batch_deployment_delete(client, endpoint_name, deployment_name,
                                                    resource_group_name, workspace_name):
    """Delete a batch deployment."""
    return client.delete(endpoint_name=endpoint_name,
                         deployment_name=deployment_name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_code_container_list(client, resource_group_name, workspace_name, skip=None):
    """List code containers in a workspace."""
    return client.list(skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_code_container_show(client, name, resource_group_name, workspace_name):
    """Get a code container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_code_container_create(client, name, resource_group_name, workspace_name,
                                                  description=None, properties=None, tags=None):
    """Create or update a code container."""
    payload = {
        'properties': {
            'description': description,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_code_container_update(instance, name, resource_group_name, workspace_name,
                                                  description=None, properties=None, tags=None):
    """Generic-update helper: copy supplied values onto the fetched instance."""
    for attr, val in (('description', description),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_code_container_delete(client, name, resource_group_name, workspace_name):
    """Delete a code container."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_list(client, name, resource_group_name, workspace_name,
                                              order_by=None, top=None, skip=None):
    """List versions of a code container."""
    return client.list(name=name,
                       order_by=order_by, top=top, skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_show(client, name, version, resource_group_name, workspace_name):
    """Get a specific version of a code container."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_code_version_create(client, name, version, resource_group_name,
                                                workspace_name, path, datastore_id=None,
                                                description=None, is_anonymous=None, properties=None,
                                                tags=None):
    """Create or update a code asset version pointing at `path` in a datastore."""
    payload = {
        'properties': {
            'datastore_id': datastore_id,
            'description': description,
            'is_anonymous': is_anonymous,
            'path': path,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_code_version_update(instance, name, version, resource_group_name,
                                                workspace_name, path, datastore_id=None,
                                                description=None, is_anonymous=None, properties=None,
                                                tags=None):
    """Generic-update helper: copy supplied values onto the fetched instance."""
    for attr, val in (('datastore_id', datastore_id),
                      ('description', description),
                      ('is_anonymous', is_anonymous),
                      ('path', path),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_code_version_delete(client, name, version, resource_group_name, workspace_name):
    """Delete a specific version of a code container."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_list(client, resource_group_name, workspace_name, skip=None):
    """List data containers in a workspace."""
    return client.list(skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_show(client, name, resource_group_name, workspace_name):
    """Get a data container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_data_container_create(client, name, resource_group_name, workspace_name,
                                                  description=None, properties=None, tags=None):
    """Create or update a data container."""
    payload = {
        'properties': {
            'description': description,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_data_container_update(instance, name, resource_group_name, workspace_name,
                                                  description=None, properties=None, tags=None):
    """Generic-update helper: copy supplied values onto the fetched instance."""
    for attr, val in (('description', description),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_data_container_delete(client, name, resource_group_name, workspace_name):
    """Delete a data container."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_list(client, name, resource_group_name, workspace_name,
                                              order_by=None, top=None, skip=None, tags=None):
    """List versions of a data container, optionally filtered by tags."""
    return client.list(name=name,
                       order_by=order_by, top=top, skip=skip, tags=tags,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_show(client, name, version, resource_group_name, workspace_name):
    """Get a specific version of a data container."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_data_version_create(client, name, version, resource_group_name,
                                                workspace_name, path, dataset_type=None,
                                                datastore_id=None, description=None,
                                                is_anonymous=None, properties=None, tags=None):
    """Create or update a data asset version pointing at `path` in a datastore."""
    payload = {
        'properties': {
            'dataset_type': dataset_type,
            'datastore_id': datastore_id,
            'description': description,
            'is_anonymous': is_anonymous,
            'path': path,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_data_version_update(instance, name, version, resource_group_name,
                                                workspace_name, path, dataset_type=None,
                                                datastore_id=None, description=None,
                                                is_anonymous=None, properties=None, tags=None):
    """Generic-update helper: copy supplied values onto the fetched instance."""
    for attr, val in (('dataset_type', dataset_type),
                      ('datastore_id', datastore_id),
                      ('description', description),
                      ('is_anonymous', is_anonymous),
                      ('path', path),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_data_version_delete(client, name, version, resource_group_name, workspace_name):
    """Delete a specific version of a data container."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_list(client, resource_group_name, workspace_name, skip=None,
                                           count=None, is_default=None, names=None, search_text=None,
                                           order_by=None, order_by_asc=None):
    """List datastores in a workspace.

    `count` defaults to 30 and `order_by_asc` to False when not supplied,
    matching the service-side paging defaults.
    """
    count = 30 if count is None else count
    order_by_asc = False if order_by_asc is None else order_by_asc
    return client.list(skip=skip, count=count, is_default=is_default,
                       names=names, search_text=search_text,
                       order_by=order_by, order_by_asc=order_by_asc,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_show(client, name, resource_group_name, workspace_name):
    """Get a datastore by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_create(client, name, resource_group_name, workspace_name,
                                             contents, skip_validation=None, description=None,
                                             is_default=None, linked_info=None, properties=None,
                                             tags=None):
    """Create or update a datastore; validation runs unless skip_validation is set."""
    skip_validation = False if skip_validation is None else skip_validation
    payload = {
        'properties': {
            'contents': contents,
            'description': description,
            'is_default': is_default,
            'linked_info': linked_info,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   skip_validation=skip_validation,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_datastore_update(instance, name, resource_group_name, workspace_name,
                                             contents, skip_validation=None, description=None,
                                             is_default=None, linked_info=None, properties=None,
                                             tags=None):
    """Generic-update helper: copy supplied values onto the fetched datastore.

    ``skip_validation`` is kept in the signature for CLI argument compatibility
    but is not consumed here — the generated code only defaulted it to False
    and never used it (dead code, now removed). It is honored by the create
    path instead.
    """
    for attr, val in (('contents', contents),
                      ('description', description),
                      ('is_default', is_default),
                      ('linked_info', linked_info),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_datastore_delete(client, name, resource_group_name, workspace_name):
    """Delete a datastore."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_datastore_list_secret(client, name, resource_group_name, workspace_name):
    """List the secrets of a datastore."""
    return client.list_secrets(name=name,
                               resource_group_name=resource_group_name,
                               workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_list(client, resource_group_name, workspace_name,
                                                       skip=None):
    """List environment containers in a workspace."""
    return client.list(skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_show(client, name, resource_group_name, workspace_name):
    """Get an environment container by name."""
    return client.get(name=name,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_environment_container_create(client, name, resource_group_name,
                                                         workspace_name, description=None,
                                                         properties=None, tags=None):
    """Create or update an environment container."""
    payload = {
        'properties': {
            'description': description,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_environment_container_update(instance, name, resource_group_name,
                                                         workspace_name, description=None,
                                                         properties=None, tags=None):
    """Generic-update helper: copy supplied values onto the fetched instance."""
    for attr, val in (('description', description),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    return instance
+
+
def machinelearningservices_environment_container_delete(client, name, resource_group_name, workspace_name):
    """Delete an environment container."""
    return client.delete(name=name,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_list(client, name, resource_group_name,
                                                                   workspace_name, order_by=None,
                                                                   top=None, skip=None):
    """List versions of an environment specification."""
    return client.list(name=name,
                       order_by=order_by, top=top, skip=skip,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_show(client, name, version,
                                                                   resource_group_name, workspace_name):
    """Get a specific version of an environment specification."""
    return client.get(name=name, version=version,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_environment_specification_version_create(client, name, version,
                                                                     resource_group_name,
                                                                     workspace_name, conda_file=None,
                                                                     description=None,
                                                                     docker_build=None,
                                                                     docker_image=None,
                                                                     is_anonymous=None,
                                                                     properties=None, tags=None,
                                                                     liveness_route=None,
                                                                     readiness_route=None,
                                                                     scoring_route=None):
    """Create or update an environment specification version.

    `docker_build` and `docker_image` are mutually exclusive ways of
    describing the docker context.
    """
    docker_candidates = [d for d in (docker_build, docker_image) if d is not None]
    if len(docker_candidates) > 1:
        raise CLIError('at most one of docker_build, docker_image is needed for docker!')
    docker = docker_candidates[0] if docker_candidates else None
    payload = {
        'properties': {
            'conda_file': conda_file,
            'description': description,
            'docker': docker,
            'is_anonymous': is_anonymous,
            'properties': properties,
            'tags': tags,
            'inference_container_properties': {
                'liveness_route': liveness_route,
                'readiness_route': readiness_route,
                'scoring_route': scoring_route,
            },
        },
    }
    return client.create_or_update(name=name, version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_environment_specification_version_update(instance, name, version,
                                                                     resource_group_name,
                                                                     workspace_name, conda_file=None,
                                                                     description=None,
                                                                     docker_build=None,
                                                                     docker_image=None,
                                                                     is_anonymous=None,
                                                                     properties=None, tags=None,
                                                                     liveness_route=None,
                                                                     readiness_route=None,
                                                                     scoring_route=None):
    """Generic-update helper for an environment specification version.

    `docker_build` and `docker_image` are mutually exclusive; supplied values
    are copied onto the fetched instance.
    """
    docker_candidates = [d for d in (docker_build, docker_image) if d is not None]
    if len(docker_candidates) > 1:
        raise CLIError('at most one of docker_build, docker_image is needed for docker!')
    docker = docker_candidates[0] if docker_candidates else None
    for attr, val in (('conda_file', conda_file),
                      ('description', description),
                      ('docker', docker),
                      ('is_anonymous', is_anonymous),
                      ('properties', properties),
                      ('tags', tags)):
        if val is not None:
            setattr(instance.properties, attr, val)
    # Inference routes live on the nested inference_container_properties object.
    for attr, val in (('liveness_route', liveness_route),
                      ('readiness_route', readiness_route),
                      ('scoring_route', scoring_route)):
        if val is not None:
            setattr(instance.properties.inference_container_properties, attr, val)
    return instance
+
+
def machinelearningservices_environment_specification_version_delete(client, name, version,
                                                                     resource_group_name,
                                                                     workspace_name):
    """Delete a specific version of an environment specification."""
    return client.delete(name=name, version=version,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_job_list(client, resource_group_name, workspace_name, skip=None,
                                     job_type=None, tags=None, tag=None):
    """List jobs in a workspace, optionally filtered by type/tags."""
    return client.list(skip=skip, job_type=job_type, tags=tags, tag=tag,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_job_show(client, id_, resource_group_name, workspace_name):
    """Get a job by its id."""
    return client.get(id=id_,
                      resource_group_name=resource_group_name,
                      workspace_name=workspace_name)
+
+
def machinelearningservices_job_create(client, id_, resource_group_name, workspace_name, properties):
    """Create or update a job from a fully-specified properties payload."""
    payload = {'properties': properties}
    return client.create_or_update(id=id_,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=payload)
+
+
def machinelearningservices_job_update(instance, id_, resource_group_name, workspace_name, properties):
    """Generic-update helper: replace the job's properties when supplied."""
    if properties is not None:
        instance.properties = properties
    return instance
+
+
def machinelearningservices_job_delete(client, id_, resource_group_name, workspace_name, no_wait=False):
    """Delete a job (long-running operation)."""
    return sdk_no_wait(no_wait, client.begin_delete,
                       id=id_,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_job_cancel(client, id_, resource_group_name, workspace_name):
    """Cancel a running job."""
    return client.cancel(id=id_,
                         resource_group_name=resource_group_name,
                         workspace_name=workspace_name)
+
+
def machinelearningservices_labeling_job_list(client, resource_group_name, workspace_name,
                                              skip=None, count=None):
    """List labeling jobs in a workspace (paged via skip/count)."""
    return client.list(skip=skip, count=count,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name)
+
+
def machinelearningservices_labeling_job_show(client,
                                              id_,
                                              resource_group_name,
                                              workspace_name,
                                              include_job_instructions=None,
                                              include_label_categories=None):
    """Fetch a labeling job, optionally expanding instructions / label categories."""
    request_args = {
        'id': id_,
        'include_job_instructions': include_job_instructions,
        'include_label_categories': include_label_categories,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get(**request_args)
+
+
def machinelearningservices_labeling_job_create(client,
                                                id_,
                                                resource_group_name,
                                                workspace_name,
                                                job_type,
                                                dataset_configuration=None,
                                                description=None,
                                                label_categories=None,
                                                labeling_job_image_properties=None,
                                                labeling_job_text_properties=None,
                                                properties=None,
                                                tags=None,
                                                inferencing_compute_binding=None,
                                                ml_assist_enabled=None,
                                                training_compute_binding=None,
                                                uri=None,
                                                no_wait=False):
    """Create or update a labeling job (long-running; honours --no-wait).

    The image/text media properties are mutually exclusive flattened choices
    for the polymorphic `labeling_job_media_properties` field.

    :raises CLIError: when both media-property arguments are supplied.
    """
    media_candidates = [prop for prop in (labeling_job_image_properties, labeling_job_text_properties)
                        if prop is not None]
    if len(media_candidates) > 1:
        raise CLIError('at most one of labeling_job_image_properties, labeling_job_text_properties is needed for '
                       'labeling_job_media_properties!')
    labeling_job_media_properties = media_candidates[0] if media_candidates else None
    body = {
        'properties': {
            'dataset_configuration': dataset_configuration,
            'description': description,
            'job_type': job_type,
            'label_categories': label_categories,
            'labeling_job_media_properties': labeling_job_media_properties,
            'properties': properties,
            'tags': tags,
            'ml_assist_configuration': {
                'inferencing_compute_binding': inferencing_compute_binding,
                'ml_assist_enabled': ml_assist_enabled,
                'training_compute_binding': training_compute_binding,
            },
            'job_instructions': {
                'uri': uri,
            },
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       id=id_,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_labeling_job_update(instance,
                                                id_,
                                                resource_group_name,
                                                workspace_name,
                                                job_type,
                                                dataset_configuration=None,
                                                description=None,
                                                label_categories=None,
                                                labeling_job_image_properties=None,
                                                labeling_job_text_properties=None,
                                                properties=None,
                                                tags=None,
                                                inferencing_compute_binding=None,
                                                ml_assist_enabled=None,
                                                training_compute_binding=None,
                                                uri=None,
                                                no_wait=False):
    """Patch a labeling job instance in place; only non-None arguments are applied.

    :raises CLIError: when both media-property arguments are supplied.
    """
    media_candidates = [prop for prop in (labeling_job_image_properties, labeling_job_text_properties)
                        if prop is not None]
    if len(media_candidates) > 1:
        raise CLIError('at most one of labeling_job_image_properties, labeling_job_text_properties is needed for '
                       'labeling_job_media_properties!')
    labeling_job_media_properties = media_candidates[0] if media_candidates else None
    # Top-level properties updated directly on instance.properties.
    for attr, value in (('dataset_configuration', dataset_configuration),
                        ('description', description),
                        ('job_type', job_type),
                        ('label_categories', label_categories),
                        ('labeling_job_media_properties', labeling_job_media_properties),
                        ('properties', properties),
                        ('tags', tags)):
        if value is not None:
            setattr(instance.properties, attr, value)
    # Nested ml-assist configuration fields.
    for attr, value in (('inferencing_compute_binding', inferencing_compute_binding),
                        ('ml_assist_enabled', ml_assist_enabled),
                        ('training_compute_binding', training_compute_binding)):
        if value is not None:
            setattr(instance.properties.ml_assist_configuration, attr, value)
    if uri is not None:
        instance.properties.job_instructions.uri = uri
    return instance
+
+
def machinelearningservices_labeling_job_delete(client,
                                                id_,
                                                resource_group_name,
                                                workspace_name):
    """Delete a labeling job by id."""
    request_args = {
        'id': id_,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.delete(**request_args)
+
+
def machinelearningservices_labeling_job_export_label(client,
                                                      id_,
                                                      resource_group_name,
                                                      workspace_name,
                                                      coco_export_summary=None,
                                                      csv_export_summary=None,
                                                      dataset_export_summary=None,
                                                      no_wait=False):
    """Start exporting labels from a labeling job (long-running; honours --no-wait).

    Exactly one of coco_export_summary, csv_export_summary or
    dataset_export_summary must be provided; it is sent as the request body.

    :raises CLIError: when zero, or more than one, summary argument is given.
    """
    all_body = [summary for summary in (coco_export_summary, csv_export_summary, dataset_export_summary)
                if summary is not None]
    if len(all_body) > 1:
        raise CLIError('at most one of coco_export_summary, csv_export_summary, dataset_export_summary is needed for '
                       'body!')
    if len(all_body) != 1:
        raise CLIError('body is required. but none of coco_export_summary, csv_export_summary, dataset_export_summary '
                       'is provided!')
    # The guards above guarantee exactly one candidate; the generated
    # `all_body[0] if len(all_body) == 1 else None` had an unreachable else arm.
    body = all_body[0]
    return sdk_no_wait(no_wait,
                       client.begin_export_labels,
                       id=id_,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_labeling_job_pause(client,
                                               id_,
                                               resource_group_name,
                                               workspace_name):
    """Pause a running labeling job."""
    request_args = {
        'id': id_,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.pause(**request_args)
+
+
def machinelearningservices_labeling_job_resume(client,
                                                id_,
                                                resource_group_name,
                                                workspace_name,
                                                no_wait=False):
    """Resume a paused labeling job (long-running; honours --no-wait)."""
    request_args = {
        'id': id_,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return sdk_no_wait(no_wait, client.begin_resume, **request_args)
+
+
def machinelearningservices_model_container_list(client,
                                                 resource_group_name,
                                                 workspace_name,
                                                 skip=None,
                                                 count=None):
    """List model containers in the workspace (paged with skip/count)."""
    request_args = {
        'skip': skip,
        'count': count,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list(**request_args)
+
+
def machinelearningservices_model_container_show(client,
                                                 name,
                                                 resource_group_name,
                                                 workspace_name):
    """Fetch a model container by name."""
    request_args = {
        'name': name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get(**request_args)
+
+
def machinelearningservices_model_container_create(client,
                                                   name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   description=None,
                                                   properties=None,
                                                   tags=None):
    """Create or update a model container from flattened description/properties/tags."""
    body = {
        'properties': {
            'description': description,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_model_container_update(instance,
                                                   name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   description=None,
                                                   properties=None,
                                                   tags=None):
    """Patch a model container instance in place; only non-None arguments are applied."""
    for attr, value in (('description', description),
                        ('properties', properties),
                        ('tags', tags)):
        if value is not None:
            setattr(instance.properties, attr, value)
    return instance
+
+
def machinelearningservices_model_container_delete(client,
                                                   name,
                                                   resource_group_name,
                                                   workspace_name):
    """Delete a model container by name."""
    request_args = {
        'name': name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.delete(**request_args)
+
+
def machinelearningservices_model_version_list(client,
                                               name,
                                               resource_group_name,
                                               workspace_name,
                                               skip=None,
                                               order_by=None,
                                               top=None,
                                               version=None,
                                               description=None,
                                               offset=None,
                                               tags=None,
                                               properties=None):
    """List versions of a model, with the server-side filter/paging options passed through."""
    request_args = {
        'name': name,
        'skip': skip,
        'order_by': order_by,
        'top': top,
        'version': version,
        'description': description,
        'offset': offset,
        'tags': tags,
        'properties': properties,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list(**request_args)
+
+
def machinelearningservices_model_version_show(client,
                                               name,
                                               version,
                                               resource_group_name,
                                               workspace_name):
    """Fetch a single model version."""
    request_args = {
        'name': name,
        'version': version,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get(**request_args)
+
+
def machinelearningservices_model_version_create(client,
                                                 name,
                                                 version,
                                                 resource_group_name,
                                                 workspace_name,
                                                 path,
                                                 datastore_id=None,
                                                 description=None,
                                                 flavors=None,
                                                 is_anonymous=None,
                                                 properties=None,
                                                 tags=None):
    """Create or update a model version; *path* is the model artifact location."""
    body = {
        'properties': {
            'datastore_id': datastore_id,
            'description': description,
            'flavors': flavors,
            'is_anonymous': is_anonymous,
            'path': path,
            'properties': properties,
            'tags': tags,
        },
    }
    return client.create_or_update(name=name,
                                   version=version,
                                   resource_group_name=resource_group_name,
                                   workspace_name=workspace_name,
                                   body=body)
+
+
def machinelearningservices_model_version_update(instance,
                                                 name,
                                                 version,
                                                 resource_group_name,
                                                 workspace_name,
                                                 path,
                                                 datastore_id=None,
                                                 description=None,
                                                 flavors=None,
                                                 is_anonymous=None,
                                                 properties=None,
                                                 tags=None):
    """Patch a model-version instance in place; only non-None arguments are applied."""
    for attr, value in (('datastore_id', datastore_id),
                        ('description', description),
                        ('flavors', flavors),
                        ('is_anonymous', is_anonymous),
                        ('path', path),
                        ('properties', properties),
                        ('tags', tags)):
        if value is not None:
            setattr(instance.properties, attr, value)
    return instance
+
+
def machinelearningservices_model_version_delete(client,
                                                 name,
                                                 version,
                                                 resource_group_name,
                                                 workspace_name):
    """Delete one version of a model."""
    request_args = {
        'name': name,
        'version': version,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.delete(**request_args)
+
+
def machinelearningservices_online_endpoint_list(client,
                                                 resource_group_name,
                                                 workspace_name,
                                                 name=None,
                                                 count=None,
                                                 compute_type=None,
                                                 skip=None,
                                                 tags=None,
                                                 properties=None,
                                                 order_by=None):
    """List online endpoints, passing the server-side filter/paging options through."""
    request_args = {
        'name': name,
        'count': count,
        'compute_type': compute_type,
        'skip': skip,
        'tags': tags,
        'properties': properties,
        'order_by': order_by,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list(**request_args)
+
+
def machinelearningservices_online_endpoint_show(client,
                                                 endpoint_name,
                                                 resource_group_name,
                                                 workspace_name):
    """Fetch an online endpoint by name."""
    request_args = {
        'endpoint_name': endpoint_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get(**request_args)
+
+
def machinelearningservices_online_endpoint_create(client,
                                                   endpoint_name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   location,
                                                   auth_mode,
                                                   tags=None,
                                                   kind=None,
                                                   description=None,
                                                   keys=None,
                                                   properties=None,
                                                   target=None,
                                                   traffic=None,
                                                   type_=None,
                                                   user_assigned_identities=None,
                                                   no_wait=False):
    """Create or update an online endpoint (long-running; honours --no-wait)."""
    body = {
        'tags': tags,
        'location': location,
        'kind': kind,
        'properties': {
            'auth_mode': auth_mode,
            'description': description,
            'keys': keys,
            'properties': properties,
            'target': target,
            'traffic': traffic,
        },
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       endpoint_name=endpoint_name,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_online_endpoint_update(client,
                                                   endpoint_name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   kind=None,
                                                   location=None,
                                                   tags=None,
                                                   traffic=None,
                                                   type_=None,
                                                   user_assigned_identities=None,
                                                   no_wait=False):
    """Patch an online endpoint (long-running; honours --no-wait)."""
    body = {
        'kind': kind,
        'location': location,
        'tags': tags,
        'properties': {
            'traffic': traffic,
        },
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_update,
                       endpoint_name=endpoint_name,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_online_endpoint_delete(client,
                                                   endpoint_name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   no_wait=False):
    """Delete an online endpoint (long-running; honours --no-wait)."""
    request_args = {
        'endpoint_name': endpoint_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return sdk_no_wait(no_wait, client.begin_delete, **request_args)
+
+
def machinelearningservices_online_endpoint_get_token(client,
                                                      endpoint_name,
                                                      resource_group_name,
                                                      workspace_name):
    """Retrieve the auth token for an online endpoint."""
    request_args = {
        'endpoint_name': endpoint_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get_token(**request_args)
+
+
def machinelearningservices_online_endpoint_list_key(client,
                                                     endpoint_name,
                                                     resource_group_name,
                                                     workspace_name):
    """List the auth keys of an online endpoint."""
    request_args = {
        'endpoint_name': endpoint_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list_keys(**request_args)
+
+
def machinelearningservices_online_endpoint_regenerate_key(client,
                                                           endpoint_name,
                                                           resource_group_name,
                                                           workspace_name,
                                                           key_type,
                                                           key_value=None,
                                                           no_wait=False):
    """Regenerate an endpoint auth key (long-running; honours --no-wait)."""
    body = {
        'key_type': key_type,
        'key_value': key_value,
    }
    return sdk_no_wait(no_wait,
                       client.begin_regenerate_keys,
                       endpoint_name=endpoint_name,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_online_deployment_list(client,
                                                   endpoint_name,
                                                   resource_group_name,
                                                   workspace_name,
                                                   order_by=None,
                                                   top=None,
                                                   skip=None):
    """List the deployments under an online endpoint."""
    request_args = {
        'endpoint_name': endpoint_name,
        'order_by': order_by,
        'top': top,
        'skip': skip,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list(**request_args)
+
+
def machinelearningservices_online_deployment_show(client,
                                                   endpoint_name,
                                                   deployment_name,
                                                   resource_group_name,
                                                   workspace_name):
    """Fetch a single deployment of an online endpoint."""
    request_args = {
        'endpoint_name': endpoint_name,
        'deployment_name': deployment_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.get(**request_args)
+
+
def machinelearningservices_online_deployment_create(client,
                                                     endpoint_name,
                                                     deployment_name,
                                                     resource_group_name,
                                                     workspace_name,
                                                     location,
                                                     properties,
                                                     tags=None,
                                                     kind=None,
                                                     type_=None,
                                                     user_assigned_identities=None,
                                                     no_wait=False):
    """Create or update an online deployment (long-running; honours --no-wait)."""
    body = {
        'tags': tags,
        'location': location,
        'kind': kind,
        'properties': properties,
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       endpoint_name=endpoint_name,
                       deployment_name=deployment_name,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_online_deployment_update(client,
                                                     endpoint_name,
                                                     deployment_name,
                                                     resource_group_name,
                                                     workspace_name,
                                                     kind=None,
                                                     location=None,
                                                     properties=None,
                                                     tags=None,
                                                     type_=None,
                                                     user_assigned_identities=None,
                                                     no_wait=False):
    """Patch an online deployment (long-running; honours --no-wait)."""
    body = {
        'kind': kind,
        'location': location,
        'properties': properties,
        'tags': tags,
        'identity': {
            'type': type_,
            'user_assigned_identities': user_assigned_identities,
        },
    }
    return sdk_no_wait(no_wait,
                       client.begin_update,
                       endpoint_name=endpoint_name,
                       deployment_name=deployment_name,
                       resource_group_name=resource_group_name,
                       workspace_name=workspace_name,
                       body=body)
+
+
def machinelearningservices_online_deployment_delete(client,
                                                     endpoint_name,
                                                     deployment_name,
                                                     resource_group_name,
                                                     workspace_name,
                                                     no_wait=False):
    """Delete a deployment from an online endpoint (long-running; honours --no-wait)."""
    request_args = {
        'endpoint_name': endpoint_name,
        'deployment_name': deployment_name,
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return sdk_no_wait(no_wait, client.begin_delete, **request_args)
+
+
def machinelearningservices_online_deployment_get_log(client,
                                                      endpoint_name,
                                                      deployment_name,
                                                      resource_group_name,
                                                      workspace_name,
                                                      container_type=None,
                                                      tail=None):
    """Fetch container logs for a deployment; *tail* limits the number of lines."""
    body = {
        'container_type': container_type,
        'tail': tail,
    }
    return client.get_logs(endpoint_name=endpoint_name,
                           deployment_name=deployment_name,
                           resource_group_name=resource_group_name,
                           workspace_name=workspace_name,
                           body=body)
+
+
def machinelearningservices_workspace_feature_list(client,
                                                   resource_group_name,
                                                   workspace_name):
    """List the features enabled for a workspace."""
    request_args = {
        'resource_group_name': resource_group_name,
        'workspace_name': workspace_name,
    }
    return client.list(**request_args)
+
+
def machinelearningservices_workspace_sku_list(client):
    """List all workspace SKUs available to the subscription."""
    list_operation = client.list
    return list_operation()
diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
new file mode 100644
index 00000000000..70488e93851
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
# Shared scenario-test bookkeeping consumed by try_manual / calc_coverage / raise_if.
logger = logging.getLogger('azure.cli.testsdk')
logger.addHandler(logging.StreamHandler())
# Allow this package to be extended across namespace-package installs.
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
# (step name, sys.exc_info() triple) pairs collected when a step fails.
exceptions = []
# Per-step result rows keyed by the step function's name; see calc_coverage().
test_map = dict()
# NOTE(review): "successed" is a misspelling of "succeeded", but it is a
# generated, load-bearing constant value — do not change it in isolation.
SUCCESSED = "successed"
FAILED = "failed"
+
+
def try_manual(func):
    """Decorator that prefers a hand-written override of *func* when one exists.

    Looks for a module of the same relative path under the sibling ``manual``
    package and, if it defines an attribute named like *func*, calls that
    instead of the generated implementation. The wrapper also records each
    step's outcome (result, timings, error text) into the module-level
    ``test_map``/``exceptions`` for later reporting by calc_coverage/raise_if.
    """
    def import_manual_function(origin_func):
        from importlib import import_module
        decorated_path = inspect.getfile(origin_func).lower()
        module_path = __path__[0].lower()
        # Only functions defined somewhere under this package can be overridden.
        if not decorated_path.startswith(module_path):
            raise Exception("Decorator can only be used in submodules!")
        manual_path = os.path.join(
            decorated_path[module_path.rfind(os.path.sep) + 1:])
        manual_file_path, manual_file_name = os.path.split(manual_path)
        module_name, _ = os.path.splitext(manual_file_name)
        # Build a relative import such as "..manual.latest.example_steps".
        manual_module = "..manual." + \
            ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
        return getattr(import_module(manual_module, package=__name__), origin_func.__name__)

    def get_func_to_call():
        # Fall back to the generated function when no manual override exists.
        func_to_call = func
        try:
            func_to_call = import_manual_function(func)
            logger.info("Found manual override for %s(...)", func.__name__)
        except (ImportError, AttributeError):
            pass
        return func_to_call

    def wrapper(*args, **kwargs):
        func_to_call = get_func_to_call()
        logger.info("running %s()...", func.__name__)
        try:
            # Pre-populate the bookkeeping row so a crash still leaves a record.
            test_map[func.__name__] = dict()
            test_map[func.__name__]["result"] = SUCCESSED
            test_map[func.__name__]["error_message"] = ""
            test_map[func.__name__]["error_stack"] = ""
            test_map[func.__name__]["error_normalized"] = ""
            test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
            ret = func_to_call(*args, **kwargs)
        except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
                JMESPathCheckAssertionError) as e:
            # With TEST_EXCEPTION_CACHE=true, failures are recorded and deferred
            # (re-raised later via raise_if); otherwise they propagate immediately.
            use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
            if use_exception_cache is None or use_exception_cache.lower() != "true":
                raise
            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
            test_map[func.__name__]["result"] = FAILED
            test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
            test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
                "\r\n", " ").replace("\n", " ")[:500]
            logger.info("--------------------------------------")
            logger.info("step exception: %s", e)
            logger.error("--------------------------------------")
            logger.error("step exception in %s: %s", func.__name__, e)
            logger.info(traceback.format_exc())
            exceptions.append((func.__name__, sys.exc_info()))
        else:
            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
        return ret

    # Classes are not wrapped; just swap in the manual class if one exists.
    if inspect.isclass(func):
        return get_func_to_call()
    return wrapper
+
+
def calc_coverage(filename):
    """Write a markdown coverage table for the steps recorded in ``test_map``.

    The report is written next to *filename* as ``<stem>_coverage.md``; only
    entries whose key starts with ``step_`` count toward the coverage ratio.

    :param filename: path of the test module the report is derived from.
    """
    # os.path.splitext strips only the final extension. The original
    # filename.split(".")[0] truncated at the FIRST dot, mangling any path
    # containing a dot elsewhere (e.g. "./x.py" -> "" -> "_coverage.md").
    filename = os.path.splitext(filename)[0]
    coverage_name = filename + "_coverage.md"
    with open(coverage_name, "w") as f:
        f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
        total = len(test_map)
        covered = 0
        for k, v in test_map.items():
            # Non-step bookkeeping entries are excluded from the denominator.
            if not k.startswith("step_"):
                total -= 1
                continue
            if v["result"] == SUCCESSED:
                covered += 1
            f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
                    "{end_dt}|\n".format(step_name=k, **v))
        f.write("Coverage: {}/{}\n".format(covered, total))
    print("Create coverage\n", file=sys.stderr)
+
+
def raise_if():
    """Re-raise the first deferred step failure collected in ``exceptions``.

    When more than one step failed, the later failures are summarised in the
    message of the re-raised first exception, which keeps its original
    traceback via ``with_traceback``. No-op when no step failed.
    """
    if exceptions:
        if len(exceptions) <= 1:
            # Single failure: re-raise the stored exception instance directly.
            raise exceptions[0][1][1]
        message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
        message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
        # exceptions[0][1] is a sys.exc_info() triple: (type, value, traceback).
        raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
new file mode 100644
index 00000000000..237173dd30d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
@@ -0,0 +1,1813 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
+
+# EXAMPLE: /Workspaces/put/Create Workspace
@try_manual
def step_workspace_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Create the scenario workspace, then wait until it reports Created.

    The rg_* parameters mirror the generated scenario signature; only the
    format placeholders used inside the command strings are consumed.
    The create itself runs with checks=[]; the caller-supplied *checks* are
    applied to the subsequent `workspace wait` command.
    """
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace create '
             '--identity type="SystemAssigned,UserAssigned" userAssignedIdentities={{"/subscriptions/00000000-1111-2222'
             '-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentiti'
             'es/testuai":{{}}}} '
             '--location "eastus2euap" '
             '--description "test description" '
             '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights'
             '/components/testinsights" '
             '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR'
             'egistry/registries/testRegistry" '
             '--identity user-assigned-identity="/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microso'
             'ft.ManagedIdentity/userAssignedIdentities/testuai" '
             '--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/'
             'aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
             'providers/Microsoft.KeyVault/vaults/testkv" '
             '--status "Enabled" '
             '--friendly-name "HelloName" '
             '--hbi-workspace false '
             '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes'
             'tkv" '
             '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript'
             'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes'
             'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" '
             'status="Approved" '
             '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto'
             'rageAccounts/{sa}" '
             '--resource-group "{rg}" '
             '--name "{myWorkspace}"',
             checks=[])
    test.cmd('az machinelearningservices workspace wait --created '
             '--resource-group "{rg}" '
             '--name "{myWorkspace}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspace
@try_manual
def step_workspace_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Run `workspace show` for the scenario workspace and apply *checks*."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace show '
             '--resource-group "{rg}" '
             '--name "{myWorkspace}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group
@try_manual
def step_workspace_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """List workspaces scoped to the scenario resource group."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace list '
             '--resource-group "{rg}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by subscription
@try_manual
def step_workspace_list2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """List workspaces at subscription scope (empty -g means no group filter)."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace list '
             '-g ""',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/patch/Update Workspace
@try_manual
def step_workspace_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Patch the workspace's description and friendly name."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace update '
             '--description "new description" '
             '--friendly-name "New friendly name" '
             '--resource-group "{rg}" '
             '--name "{myWorkspace}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/List Workspace Keys
@try_manual
def step_workspace_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """List the keys of the secondary scenario workspace (rg_3/myWorkspace2)."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace list-key '
             '--resource-group "{rg_3}" '
             '--name "{myWorkspace2}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Prepare Notebook
@try_manual
def step_workspace_prepare_notebook(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Prepare the notebook resource for the secondary scenario workspace."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace prepare-notebook '
             '--resource-group "{rg_3}" '
             '--name "{myWorkspace2}"',
             checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Resync Workspace Keys
@try_manual
def step_workspace_resync_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Resync the keys of the secondary scenario workspace."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices workspace resync-key '
             '--resource-group "{rg_3}" '
             '--name "{myWorkspace2}"',
             checks=checks)
+
+
+# EXAMPLE: /BatchDeployments/put/CreateOrUpdate Batch Deployment.
@try_manual
def step_batch_deployment_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Create/update the scenario batch deployment under testBatchEndpoint.

    The long --properties string is the generated example payload; the
    doubled braces ({{ }}) escape literal JSON braces for str.format.
    """
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices batch-deployment create '
             '--type "UserAssigned" '
             '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
             'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
             '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
             '--kind "string" '
             '--location "string" '
             '--properties description="string" codeConfiguration={{"codeId":"/subscriptions/00000000-1111-2222-3333-44'
             '4444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testw'
             'orkspace/codes/testcode/versions/1","scoringScript":"score.py"}} compute={{"instanceCount":0,"instanceTyp'
             'e":"string","isLocal":false,"location":"string","properties":{{"additionalProp1":"string","additionalProp'
             '2":"string","additionalProp3":"string"}},"target":"/subscriptions/00000000-1111-2222-3333-444444444444/re'
             'sourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/comp'
             'utes/testcompute"}} environmentId="/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Micro'
             'soft.MachineLearningServices/workspaces/{myWorkspace}/environments/myenv" environmentVariables={{"additio'
             'nalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} errorThreshold=0 '
             'loggingLevel="Info" miniBatchSize=0 model={{"assetId":"/subscriptions/00000000-1111-2222-3333-44444444444'
             '4/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/'
             'models/testmodel/versions/1","referenceType":"Id"}} outputConfiguration={{"appendRowFileName":"string","o'
             'utputAction":"SummaryOnly"}} partitionKeys="string" properties={{"additionalProp1":"string","additionalPr'
             'op2":"string","additionalProp3":"string"}} retrySettings={{"maxRetries":0,"timeout":"string"}} '
             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
             '--deployment-name "testBatchDeployment" '
             '--endpoint-name "testBatchEndpoint" '
             '--resource-group "{rg_8}" '
             '--workspace-name "{myWorkspace}"',
             checks=checks)
+
+
+# EXAMPLE: /BatchDeployments/get/Get Batch Deployment.
@try_manual
def step_batch_deployment_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
    """Show the scenario batch deployment."""
    if checks is None:
        checks = []
    test.cmd('az machinelearningservices batch-deployment show '
             '--deployment-name "testBatchDeployment" '
             '--endpoint-name "testBatchEndpoint" '
             '--resource-group "{rg_8}" '
             '--workspace-name "{myWorkspace}"',
             checks=checks)
+
+
+# EXAMPLE: /BatchDeployments/get/List Batch Deployment.
+@try_manual
+def step_batch_deployment_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-deployment list '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchDeployments/patch/Update Batch Deployment.
+@try_manual
+def step_batch_deployment_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-deployment update '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--deployment-name "testBatchDeployment" '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchDeployments/delete/Delete Batch Deployment.
+@try_manual
+def step_batch_deployment_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-deployment delete -y '
+ '--deployment-name "testBatchDeployment" '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/put/CreateOrUpdate Batch Endpoint.
+@try_manual
+def step_batch_endpoint_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create a batch endpoint with a user-assigned identity and AML-token auth; run *checks*."""
+    if checks is None:
+        checks = []
+    # {{ / }} are doubled braces: test.cmd() formats the command string, so literal
+    # JSON braces in the payload must be escaped.
+    test.cmd('az machinelearningservices batch-endpoint create '
+             '--type "UserAssigned" '
+             '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+             'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+             '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+             '--kind "string" '
+             '--location "string" '
+             '--properties description="string" authMode="AMLToken" keys={{"primaryKey":"string","secondaryKey":"string'
+             '"}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'traffic={{"myDeployment1":0,"myDeployment2":1}} '
+             '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+             '--endpoint-name "testBatchEndpoint" '
+             '--resource-group "{rg_8}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/get/Get Batch Endpoint.
+@try_manual
+def step_batch_endpoint_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-endpoint show '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/get/List Batch Endpoint.
+@try_manual
+def step_batch_endpoint_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-endpoint list '
+ '--count 1 '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/patch/Update Batch Endpoint.
+@try_manual
+def step_batch_endpoint_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-endpoint update '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/post/ListKeys Batch Endpoint.
+@try_manual
+def step_batch_endpoint_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-endpoint list-key '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /BatchEndpoints/delete/Delete Batch Endpoint.
+@try_manual
+def step_batch_endpoint_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices batch-endpoint delete -y '
+ '--endpoint-name "testBatchEndpoint" '
+ '--resource-group "{rg_8}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeContainers/put/CreateOrUpdate Code Container.
+@try_manual
+def step_code_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-container create '
+ '--name "testContainer" '
+ '--properties description="string" tags={{"tag1":"value1","tag2":"value2"}} '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeContainers/get/Get Code Container.
+@try_manual
+def step_code_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-container show '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeContainers/get/List Code Container.
+@try_manual
+def step_code_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-container list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeContainers/delete/Delete Code Container.
+@try_manual
+def step_code_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-container delete -y '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeVersions/put/CreateOrUpdate Code Version.
+@try_manual
+def step_code_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create version 1 of the test code container, pointing at a datastore path; run *checks*."""
+    if checks is None:
+        checks = []
+    # {{ / }} are doubled braces so test.cmd()'s string formatting leaves literal
+    # JSON braces; {subscription_id}/{rg_8}/{myWorkspace}/{myDatastore} are
+    # substituted from the test's kwargs.
+    test.cmd('az machinelearningservices code-version create '
+             '--name "testContainer" '
+             '--properties path="path/to/file.py" description="string" datastoreId="/subscriptions/{subscription_id}/re'
+             'sourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/datastores/{myDa'
+             'tastore}" isAnonymous=true properties={{"additionalProp1":"string","additionalProp2":"string","additional'
+             'Prop3":"string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"'
+             '}} '
+             '--resource-group "{rg_3}" '
+             '--version "1" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /CodeVersions/get/Get Code Version.
+@try_manual
+def step_code_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-version show '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeVersions/get/List Code Version.
+@try_manual
+def step_code_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-version list '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /CodeVersions/delete/Delete Code Version.
+@try_manual
+def step_code_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices code-version delete -y '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create a AML Compute
+@try_manual
+def step_compute_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create an AmlCompute cluster (Windows, 0-1 nodes, custom VM image); run *checks*."""
+    if checks is None:
+        checks = []
+    # \\" escapes double quotes for the shell; {{ }} escapes braces for
+    # test.cmd()'s string formatting.
+    test.cmd('az machinelearningservices compute create '
+             '--name "{myCompute}" '
+             '--location "eastus" '
+             '--properties "{{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{{\\"enableNodePublicIp\\":true,\\"is'
+             'olatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\'
+             '"scaleSettings\\":{{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\'
+             '"}},\\"virtualMachineImage\\":{{\\"id\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_5}/provid'
+             'ers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"}},\\"vmPriority'
+             '\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create a DataFactory Compute
+@try_manual
+def step_compute_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"DataFactory\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create AKS Compute
+@try_manual
+def step_compute_create3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"AKS\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an ComputeInstance Compute
+@try_manual
+def step_compute_create4(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create a personal ComputeInstance (SSH disabled, assigned user); run *checks*."""
+    if checks is None:
+        checks = []
+    # \\" escapes double quotes for the shell; {{ }} escapes braces for
+    # test.cmd()'s string formatting.
+    test.cmd('az machinelearningservices compute create '
+             '--name "{myCompute}" '
+             '--location "eastus" '
+             '--properties "{{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{{\\"applicationSharingPolicy\\"'
+             ':\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings'
+             '\\":{{\\"assignedUser\\":{{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00'
+             '000000-0000-0000-0000-000000000000\\"}}}},\\"sshSettings\\":{{\\"sshPublicAccess\\":\\"Disabled\\"}},\\"s'
+             'ubnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an ComputeInstance Compute with minimal inputs
+@try_manual
+def step_compute_create5(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create a ComputeInstance specifying only the VM size; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices compute create '
+             '--name "{myCompute}" '
+             '--location "eastus" '
+             '--properties "{{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{{\\"vmSize\\":\\"STANDARD_NC6\\'
+             '"}}}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an ComputeInstance Compute with Schedules
+@try_manual
+def step_compute_create6(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create a personal ComputeInstance with a cron stop schedule (18:00 daily, PST); run *checks*."""
+    if checks is None:
+        checks = []
+    # \\" escapes double quotes for the shell; {{ }} escapes braces for
+    # test.cmd()'s string formatting.
+    test.cmd('az machinelearningservices compute create '
+             '--name "{myCompute}" '
+             '--location "eastus" '
+             '--properties "{{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{{\\"applicationSharingPolicy\\"'
+             ':\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings'
+             '\\":{{\\"assignedUser\\":{{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00'
+             '000000-0000-0000-0000-000000000000\\"}}}},\\"schedules\\":{{\\"computeStartStop\\":[{{\\"action\\":\\"Sto'
+             'p\\",\\"cron\\":{{\\"expression\\":\\"0 18 * * *\\",\\"startTime\\":\\"2021-04-23T01:30:00\\",\\"timeZone'
+             '\\":\\"Pacific Standard Time\\"}},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Cron\\"}}]}},\\"sshSett'
+             'ings\\":{{\\"sshPublicAccess\\":\\"Disabled\\"}},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":'
+             '\\"STANDARD_NC6\\"}}}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Compute/get/Get a AKS Compute
+@try_manual
+def step_compute_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute show '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/get/Get a AML Compute
+@try_manual
+def step_compute_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ return step_compute_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Compute/get/Get an ComputeInstance
+@try_manual
+def step_compute_show3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ return step_compute_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Compute/get/Get Computes
+@try_manual
+def step_compute_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/patch/Update a AmlCompute Compute
+@try_manual
+def step_compute_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute update '
+ '--name "{myCompute}" '
+ '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Get compute nodes information for a compute
+@try_manual
+def step_compute_list_node(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list-node '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/List AKS Compute Keys
+@try_manual
+def step_compute_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list-key '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Restart ComputeInstance Compute
+@try_manual
+def step_compute_restart(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute restart '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Start ComputeInstance Compute
+@try_manual
+def step_compute_start(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute start '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Stop ComputeInstance Compute
+@try_manual
+def step_compute_stop(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute stop '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Update schedules of ComputeInstance
+@try_manual
+def step_compute_update_schedule(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Replace the compute instance start/stop schedules with a daily 18:30 Start recurrence; run *checks*."""
+    if checks is None:
+        checks = []
+    # \\" escapes double quotes for the shell; {{ }} escapes braces for
+    # test.cmd()'s string formatting.
+    test.cmd('az machinelearningservices compute update-schedule '
+             '--name "{myCompute}" '
+             '--compute-start-stop "[{{\\"action\\":\\"Start\\",\\"recurrence\\":{{\\"frequency\\":\\"Day\\",\\"interva'
+             'l\\":1,\\"schedule\\":{{\\"hours\\":[18],\\"minutes\\":[30],\\"weekDays\\":null}},\\"startTime\\":\\"2021'
+             '-04-23T01:30:00\\",\\"timeZone\\":\\"Pacific Standard Time\\"}},\\"status\\":\\"Enabled\\",\\"triggerType'
+             '\\":\\"Recurrence\\"}}]" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /Compute/delete/Delete Compute
+@try_manual
+def step_compute_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute delete -y '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--underlying-resource-action "Delete" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataContainers/put/CreateOrUpdate Data Container.
+@try_manual
+def step_data_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-container create '
+ '--name "datacontainer123" '
+ '--properties description="string" properties={{"properties1":"value1","properties2":"value2"}} '
+ 'tags={{"tag1":"value1","tag2":"value2"}} '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataContainers/get/Get Data Container.
+@try_manual
+def step_data_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-container show '
+ '--name "datacontainer123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataContainers/get/List Data Container.
+@try_manual
+def step_data_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-container list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataContainers/delete/Delete Data Container.
+@try_manual
+def step_data_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-container delete -y '
+ '--name "datacontainer123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (Azure Data Lake Gen1 w/ ServicePrincipal).
+@try_manual
+def step_datastore_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore backed by ADLS Gen1 with service-principal credentials; run *checks*."""
+    if checks is None:
+        checks = []
+    # {{ / }} are doubled braces so test.cmd()'s string formatting leaves
+    # literal JSON braces in the --properties payload.
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"contentsType":"AzureDataLakeGen1","credentials":{{"authorit'
+             'yUrl":"string","clientId":"00000000-1111-2222-3333-444444444444","credentialsType":"ServicePrincipal","re'
+             'sourceUri":"string","secrets":{{"clientSecret":"string","secretsType":"ServicePrincipal"}},"tenantId":"00'
+             '000000-1111-2222-3333-444444444444"}},"storeName":"testStore"}} isDefault=true '
+             'linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (Azure Data Lake Gen2 w/ Service Principal).
+@try_manual
+def step_datastore_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore on blob storage with service-principal credentials; run *checks*.
+
+    NOTE(review): contentsType here is "AzureBlob" even though the example
+    title says Data Lake Gen2 — taken verbatim from the service example.
+    """
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"accountName":"string","containerName":"string","contentsTyp'
+             'e":"AzureBlob","credentials":{{"authorityUrl":"string","clientId":"00000000-1111-2222-3333-444444444444",'
+             '"credentialsType":"ServicePrincipal","resourceUri":"string","secrets":{{"clientSecret":"string","secretsT'
+             'ype":"ServicePrincipal"}},"tenantId":"00000000-1111-2222-3333-444444444444"}},"endpoint":"core.windows.ne'
+             't","protocol":"https"}} isDefault=true linkedInfo={{"linkedId":"string","linkedResourceName":"string","or'
+             'igin":"Synapse"}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"s'
+             'tring"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (Azure File store w/ AccountKey).
+@try_manual
+def step_datastore_create3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore on Azure Files with account-key credentials; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"accountName":"string","containerName":"string","contentsTyp'
+             'e":"AzureFile","credentials":{{"credentialsType":"AccountKey","secrets":{{"key":"string","secretsType":"A'
+             'ccountKey"}}}},"endpoint":"core.windows.net","protocol":"https"}} isDefault=true '
+             'linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (Azure Postgre SQL w/ SQL Admin).
+@try_manual
+def step_datastore_create4(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore on Azure PostgreSQL with SQL-admin credentials; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"contentsType":"AzurePostgreSql","credentials":{{"credential'
+             'sType":"SqlAdmin","secrets":{{"password":"string","secretsType":"SqlAdmin"}},"userId":"string"}},"databas'
+             'eName":"string","enableSSL":true,"endpoint":"string","portNumber":123,"serverName":"string"}} '
+             'isDefault=true linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (Azure SQL Database w/ SQL Admin).
+@try_manual
+def step_datastore_create5(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore on Azure SQL Database with SQL-admin credentials; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"contentsType":"AzureSqlDatabase","credentials":{{"credentia'
+             'lsType":"SqlAdmin","secrets":{{"password":"string","secretsType":"SqlAdmin"}},"userId":"string"}},"databa'
+             'seName":"string","endpoint":"string","portNumber":123,"serverName":"string"}} isDefault=true '
+             'linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/put/CreateOrUpdate datastore (AzureBlob w/ AccountKey).
+@try_manual
+def step_datastore_create6(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create the default datastore on blob storage with account-key credentials; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices datastore create '
+             '--name "testDatastore" '
+             '--properties description="string" contents={{"accountName":"string","containerName":"string","contentsTyp'
+             'e":"AzureBlob","credentials":{{"credentialsType":"AccountKey","secrets":{{"key":"string","secretsType":"A'
+             'ccountKey"}}}},"endpoint":"core.windows.net","protocol":"https"}} isDefault=true '
+             'linkedInfo={{"linkedId":"string","linkedResourceName":"string","origin":"Synapse"}} '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /Datastores/get/Get datastore.
+@try_manual
+def step_datastore_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices datastore show '
+ '--name "testDatastore" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Datastores/get/List datastores.
+@try_manual
+def step_datastore_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices datastore list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Datastores/post/Get datastore secrets.
+@try_manual
+def step_datastore_list_secret(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices datastore list-secret '
+ '--name "testDatastore" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataVersions/put/CreateOrUpdate Data Version.
+@try_manual
+def step_data_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create version 1 of the test dataset, pointing at a CSV on a datastore; run *checks*."""
+    if checks is None:
+        checks = []
+    # {{ / }} are doubled braces so test.cmd()'s string formatting leaves
+    # literal JSON braces; {subscription_id}/{rg_8}/{myWorkspace}/{myDatastore}
+    # are substituted from the test's kwargs.
+    test.cmd('az machinelearningservices data-version create '
+             '--name "dataset123" '
+             '--properties path="path/to/file.csv" description="string" datasetType="Simple" '
+             'datastoreId="/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningSe'
+             'rvices/workspaces/{myWorkspace}/datastores/{myDatastore}" isAnonymous=true '
+             'properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+             '--resource-group "{rg_3}" '
+             '--version "1" '
+             '--workspace-name "{myWorkspace6}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataVersions/get/Get Data Version.
+@try_manual
+def step_data_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-version show '
+ '--name "dataset123" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataVersions/get/List Data Version.
+@try_manual
+def step_data_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-version list '
+ '--name "dataset123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataVersions/delete/Delete Data Version.
+@try_manual
+def step_data_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices data-version delete -y '
+ '--name "dataset123" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /Datastores/delete/Delete datastore.
+@try_manual
+def step_datastore_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices datastore delete -y '
+ '--name "testDatastore" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/put/CreateOrUpdate Environment Container.
+@try_manual
+def step_environment_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+    """Create (or update) the "testEnvironment" environment container; run *checks*."""
+    if checks is None:
+        checks = []
+    test.cmd('az machinelearningservices environment-container create '
+             '--name "testEnvironment" '
+             '--properties description="string" properties={{"additionalProp1":"string","additionalProp2":"string","add'
+             'itionalProp3":"string"}} tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"'
+             'string"}} '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/get/Get Environment Container.
+@try_manual
+def step_environment_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-container show '
+ '--name "testEnvironment" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/get/List Environment Container.
+@try_manual
+def step_environment_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-container list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentContainers/delete/Delete Environment Container.
+@try_manual
+def step_environment_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-container delete -y '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/put/CreateOrUpdate Environment Specification Version.
+@try_manual
+def step_environment_specification_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-specification-version create '
+ '--name "testEnvironment" '
+ '--properties description="string" condaFile="channels:\\n- defaults\\ndependencies:\\n- python '
+ 'docker={{"dockerSpecificationType":"Build","dockerfile":"FROM myimage"}} properties={{"additionalProp1":"'
+ 'string","additionalProp2":"string","additionalProp3":"string"}} tags={{"additionalProp1":"string","additi'
+ 'onalProp2":"string","additionalProp3":"string"}} '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/get/Get Environment Specification Version.
+@try_manual
+def step_environment_specification_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-specification-version show '
+ '--name "testEnvironment" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/get/List Environment Specification Version.
+@try_manual
+def step_environment_specification_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-specification-version list '
+ '--name "testEnvironment" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /EnvironmentSpecificationVersions/delete/Delete Environment Specification Version.
+@try_manual
+def step_environment_specification_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices environment-specification-version delete -y '
+             '--name "testEnvironment" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/put/CreateOrUpdate Command Job.
+@try_manual
+def step_job_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job create '
+ '--properties "{{\\"description\\":\\"string\\",\\"codeId\\":\\"/subscriptions/{subscription_id}/resourceG'
+ 'roups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/codes/mycode/versions/1'
+ '\\",\\"command\\":\\"python file.py test\\",\\"compute\\":{{\\"instanceCount\\":1,\\"target\\":\\"/subscr'
+ 'iptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{m'
+ 'yWorkspace}/computes/mycompute\\"}},\\"distribution\\":{{\\"distributionType\\":\\"PyTorch\\",\\"processC'
+ 'ount\\":2}},\\"environmentId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Micro'
+ 'soft.MachineLearningServices/workspaces/{myWorkspace}/environments/AzureML-Tutorial/versions/1\\",\\"envi'
+ 'ronmentVariables\\":{{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"}},\\"experimentName\\'
+ '":\\"myExperiment\\",\\"identity\\":{{\\"identityType\\":\\"AMLToken\\"}},\\"inputDataBindings\\":{{\\"te'
+ 'st\\":{{\\"dataId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.Machin'
+ 'eLearningServices/workspaces/{myWorkspace}/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/co'
+ 'mpute\\"}}}},\\"jobType\\":\\"Command\\",\\"outputDataBindings\\":{{\\"test\\":{{\\"datastoreId\\":\\"/su'
+ 'bscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspace'
+ 's/{myWorkspace}/datastore/{{{{myDatastore}}}}\\",\\"pathOnCompute\\":\\"path/on/compute\\"}}}},\\"propert'
+ 'ies\\":{{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"s'
+ 'tring\\"}},\\"tags\\":{{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"addition'
+ 'alProp3\\":\\"string\\"}},\\"timeout\\":\\"PT1M\\"}}" '
+ '--id "testJob" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/put/CreateOrUpdate Sweep Job.
+@try_manual
+def step_job_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job create '
+ '--properties "{{\\"description\\":\\"string\\",\\"algorithm\\":\\"Grid\\",\\"compute\\":{{\\"instanceCoun'
+ 't\\":1,\\"target\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.Machine'
+ 'LearningServices/workspaces/{myWorkspace}/computes/mycompute\\"}},\\"identity\\":{{\\"identityType\\":\\"'
+ 'AMLToken\\"}},\\"jobType\\":\\"Sweep\\",\\"maxConcurrentTrials\\":1,\\"maxTotalTrials\\":1,\\"objective\\'
+ '":{{\\"goal\\":\\"Minimize\\",\\"primaryMetric\\":\\"string\\"}},\\"properties\\":{{\\"additionalProp1\\"'
+ ':\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}},\\"searchSpace\\":{'
+ '{\\"name\\":{{}}}},\\"tags\\":{{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"'
+ 'additionalProp3\\":\\"string\\"}},\\"timeout\\":\\"PT1M\\",\\"trial\\":{{\\"codeId\\":\\"/subscriptions/{'
+ 'subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspac'
+ 'e}/codes/mycode/versions/1\\",\\"command\\":\\"python file.py test\\",\\"distribution\\":{{\\"distributio'
+ 'nType\\":\\"PyTorch\\",\\"processCount\\":2}},\\"environmentId\\":\\"/subscriptions/{subscription_id}/res'
+ 'ourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/environments/Azur'
+ 'eML-Tutorial/versions/1\\",\\"environmentVariables\\":{{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":'
+ '\\"string\\"}},\\"inputDataBindings\\":{{\\"test\\":{{\\"dataId\\":\\"/subscriptions/{subscription_id}/re'
+ 'sourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/data/mydataset/v'
+ 'ersions/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"}}}},\\"outputDataBindings\\":{{\\"test\\":{{\\"dat'
+ 'astoreId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearning'
+ 'Services/workspaces/{myWorkspace}/datastore/{{{{myDatastore}}}}\\",\\"pathOnCompute\\":\\"path/on/compute'
+ '\\"}}}},\\"timeout\\":\\"PT1M\\"}}}}" '
+ '--id "testJob" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/get/Get Command Job.
+@try_manual
+def step_job_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job show '
+ '--id "testJob" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/get/Get Sweep Job.
+@try_manual
+def step_job_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ return step_job_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Jobs/get/List Command Job.
+@try_manual
+def step_job_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job list '
+ '--job-type "Command" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/get/List Sweep Job.
+@try_manual
+def step_job_list2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job list '
+ '--job-type "Sweep" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/post/Cancel Job.
+@try_manual
+def step_job_cancel(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job cancel '
+ '--id "testJob" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Jobs/delete/Delete Job.
+@try_manual
+def step_job_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices job delete -y '
+ '--id "testJob" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/put/CreateOrUpdate Labeling Job.
+@try_manual
+def step_labeling_job_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job create '
+ '--properties description="string" datasetConfiguration={{"assetName":"myAsset","datasetVersion":"1","incr'
+ 'ementalDatasetRefreshEnabled":true}} jobInstructions={{"uri":"link/to/instructions"}} jobType="Labeling" '
+ 'labelCategories={{"myCategory1":{{"allowMultiSelect":true,"classes":{{"myLabelClass1":{{"displayName":"my'
+ 'LabelClass1","subclasses":{{}}}},"myLabelClass2":{{"displayName":"myLabelClass2","subclasses":{{}}}}}},"d'
+ 'isplayName":"myCategory1Title"}},"myCategory2":{{"allowMultiSelect":true,"classes":{{"myLabelClass1":{{"d'
+ 'isplayName":"myLabelClass1","subclasses":{{}}}},"myLabelClass2":{{"displayName":"myLabelClass2","subclass'
+ 'es":{{}}}}}},"displayName":"myCategory2Title"}}}} labelingJobMediaProperties={{"mediaType":"Image"}} '
+ 'mlAssistConfiguration={{"inferencingComputeBinding":{{"instanceCount":1,"target":"/subscriptions/00000000'
+ '-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningService'
+ 's/workspaces/testworkspace/computes/myscoringcompute"}},"mlAssistEnabled":true,"trainingComputeBinding":{'
+ '{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceG'
+ 'roup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mytrainingcompute'
+ '"}}}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+ 'tags={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+ '--id "testLabelingJob" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/get/Get Labeling Job.
+@try_manual
+def step_labeling_job_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job show '
+ '--id "testLabelingJob" '
+ '--include-job-instructions true '
+ '--include-label-categories true '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/get/List Labeling Job.
+@try_manual
+def step_labeling_job_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job list '
+ '--count "10" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/ExportLabels Labeling Job.
+@try_manual
+def step_labeling_job_export_label(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job export-label '
+ '--id "testLabelingJob" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/Pause Labeling Job.
+@try_manual
+def step_labeling_job_pause(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job pause '
+ '--id "testLabelingJob" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/post/Resume Labeling Job.
+@try_manual
+def step_labeling_job_resume(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job resume '
+ '--id "testLabelingJob" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /LabelingJobs/delete/Delete Labeling Job.
+@try_manual
+def step_labeling_job_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices labeling-job delete -y '
+ '--id "testLabelingJob" '
+ '--resource-group "{rg}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelContainers/put/CreateOrUpdate Model Container.
+@try_manual
+def step_model_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-container create '
+ '--name "testContainer" '
+ '--properties description="Model container description" tags={{"tag1":"value1","tag2":"value2"}} '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelContainers/get/Get Model Container.
+@try_manual
+def step_model_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-container show '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelContainers/get/List Model Container.
+@try_manual
+def step_model_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-container list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelContainers/delete/Delete Model Container.
+@try_manual
+def step_model_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-container delete -y '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelVersions/put/CreateOrUpdate Model Version.
+@try_manual
+def step_model_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-version create '
+ '--name "testContainer" '
+ '--properties path="path/in/datastore" description="Model version description" '
+ 'datastoreId="/subscriptions/{subscription_id}/resourceGroups/{rg_3}/providers/Microsoft.MachineLearningSe'
+ 'rvices/workspaces/{myWorkspace6}/datastores/{myDatastore2}" flavors={{"python_function":{{"data":{{"loade'
+ 'r_module":"myLoaderModule"}}}}}} properties={{"prop1":"value1","prop2":"value2"}} '
+ 'tags={{"tag1":"value1","tag2":"value2"}} '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelVersions/get/Get Model Version.
+@try_manual
+def step_model_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-version show '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelVersions/get/List Model Version.
+@try_manual
+def step_model_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-version list '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+ '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /ModelVersions/delete/Delete Model Version.
+@try_manual
+def step_model_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices model-version delete -y '
+ '--name "testContainer" '
+ '--resource-group "{rg_3}" '
+             '--version "1" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/put/CreateOrUpdate K8S Online Deployment.
+@try_manual
+def step_online_deployment_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment create '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--location "string" '
+ '--properties "{{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfiguration\\":{{\\'
+ '"codeId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningS'
+ 'ervices/workspaces/{myWorkspace}/codes/code123/versions/1\\",\\"scoringScript\\":\\"string\\"}},\\"contai'
+ 'nerResourceRequirements\\":{{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memoryInGBLimit\\":64}},'
+ '\\"endpointComputeType\\":\\"K8S\\",\\"environmentId\\":\\"/subscriptions/{subscription_id}/resourceGroup'
+ 's/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/environments/env123\\",\\"l'
+ 'ivenessProbe\\":{{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"succ'
+ 'essThreshold\\":50,\\"timeout\\":\\"PT1M\\"}},\\"model\\":{{\\"assetId\\":\\"/subscriptions/{subscription'
+ '_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/models/mo'
+ 'del123\\",\\"referenceType\\":\\"Id\\"}},\\"properties\\":{{\\"additionalProp1\\":\\"string\\",\\"additio'
+ 'nalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}},\\"provisioningState\\":\\"Creating\\",\\"r'
+ 'equestSettings\\":{{\\"maxConcurrentRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTime'
+ 'out\\":\\"PT1M\\"}},\\"scaleSettings\\":{{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"'
+ 'targetUtilizationPercentage\\":50}}}}" '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/put/CreateOrUpdate Managed Online Deployment.
+@try_manual
+def step_online_deployment_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment create '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--location "string" '
+ '--properties "{{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfiguration\\":{{\\'
+ '"codeId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningS'
+ 'ervices/workspaces/{myWorkspace}/codes/code123/versions/1\\",\\"scoringScript\\":\\"string\\"}},\\"endpoi'
+ 'ntComputeType\\":\\"Managed\\",\\"environmentId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg'
+ '_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/environments/env123\\",\\"livene'
+ 'ssProbe\\":{{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successTh'
+ 'reshold\\":50,\\"timeout\\":\\"PT1M\\"}},\\"model\\":{{\\"assetId\\":\\"/subscriptions/{subscription_id}/'
+ 'resourceGroups/{rg_8}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace}/models/model12'
+ '3\\",\\"referenceType\\":\\"Id\\"}},\\"properties\\":{{\\"additionalProp1\\":\\"string\\",\\"additionalPr'
+ 'op2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"}},\\"provisioningState\\":\\"Creating\\",\\"reques'
+ 'tSettings\\":{{\\"maxConcurrentRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\'
+ '":\\"PT1M\\"}},\\"scaleSettings\\":{{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targe'
+ 'tUtilizationPercentage\\":50}}}}" '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/get/Get K8S Online Deployment.
+@try_manual
+def step_online_deployment_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment show '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/get/Get Managed Online Deployment.
+@try_manual
+def step_online_deployment_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ return step_online_deployment_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /OnlineDeployments/get/List Online Deployments.
+@try_manual
+def step_online_deployment_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment list '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/patch/Update K8S Online Deployment.
+@try_manual
+def step_online_deployment_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment update '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--properties "{{\\"containerResourceRequirements\\":{{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\'
+ '"memoryInGBLimit\\":64}},\\"endpointComputeType\\":\\"K8S\\",\\"scaleSettings\\":{{\\"pollingInterval\\":'
+ '\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}}}" '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/patch/Update Managed Online Deployment.
+@try_manual
+def step_online_deployment_update2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment update '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--properties "{{\\"endpointComputeType\\":\\"Managed\\",\\"readinessProbe\\":{{\\"failureThreshold\\":50,'
+ '\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"}'
+ '},\\"scaleSettings\\":{{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}}}" '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/post/Get Online Deployment Logs.
+@try_manual
+def step_online_deployment_get_log(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment get-log '
+ '--container-type "StorageInitializer" '
+ '--tail 0 '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineDeployments/delete/Delete Online Deployment.
+@try_manual
+def step_online_deployment_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-deployment delete -y '
+ '--deployment-name "testDeployment" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/put/CreateOrUpdate Online Endpoint.
+@try_manual
+def step_online_endpoint_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint create '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--location "string" '
+ '--properties description="string" authMode="AMLToken" keys={{"primaryKey":"string","secondaryKey":"string'
+ '"}} properties={{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"}} '
+ 'target="/subscriptions/{subscription_id}/resourceGroups/{rg_8}/providers/Microsoft.MachineLearningService'
+ 's/workspaces/{myWorkspace}/computes/{{myCompute}}" traffic={{"myDeployment1":0,"myDeployment2":1}} '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/get/Get Online Endpoint.
+@try_manual
+def step_online_endpoint_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint show '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/get/List Online Endpoint.
+@try_manual
+def step_online_endpoint_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/patch/Update Online Endpoint.
+@try_manual
+def step_online_endpoint_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint update '
+ '--type "UserAssigned" '
+ '--user-assigned-identities "{{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resou'
+ 'rceGroup-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myuseridentity\\":{{\\"clientId'
+ '\\":\\"string\\",\\"principalId\\":\\"string\\"}}}}" '
+ '--kind "string" '
+ '--traffic myDeployment1=0 myDeployment2=1 '
+ '--tags additionalProp1="string" additionalProp2="string" additionalProp3="string" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/GetToken Online Endpoint.
+@try_manual
+def step_online_endpoint_get_token(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint get-token '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/ListKeys Online Endpoint.
+@try_manual
+def step_online_endpoint_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint list-key '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/post/RegenerateKeys Online Endpoint.
+@try_manual
+def step_online_endpoint_regenerate_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint regenerate-key '
+ '--key-type "Primary" '
+ '--key-value "string" '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /OnlineEndpoints/delete/Delete Online Endpoint.
+@try_manual
+def step_online_endpoint_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices online-endpoint delete -y '
+ '--endpoint-name "testEndpoint" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace6}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection create '
+ '--name "{myPrivateEndpointConnection}" '
+ '--private-link-service-connection-state description="Auto-Approved" status="Approved" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/StorageAccountListPrivateEndpointConnections
+@try_manual
+def step_private_endpoint_connection_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection list '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection show '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection delete -y '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources
+@try_manual
+def step_private_link_resource_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-link-resource list '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily
+@try_manual
+def step_quota_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/post/update quotas
+@try_manual
+def step_quota_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota update '
+ '--location "eastus" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_4}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace3}/quotas/{myQuot'
+ 'a}" limit=100 unit="Count" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_4}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace4}/quotas/{myQuot'
+ 'a}" limit=200 unit="Count"',
+ checks=checks)
+
+
+# EXAMPLE: /Usages/get/List Usages
+@try_manual
+def step_usage_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices usage list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes
+@try_manual
+def step_virtual_machine_size_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices virtual-machine-size list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection
+@try_manual
+def step_workspace_connection_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection create '
+ '--connection-name "connection-1" '
+ '--auth-type "PAT" '
+ '--category "ACR" '
+ '--target "www.facebook.com" '
+ '--value "secrets" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection
+@try_manual
+def step_workspace_connection_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection show '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections
+@try_manual
+def step_workspace_connection_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection list '
+ '--category "ACR" '
+ '--resource-group "{rg_7}" '
+ '--target "www.facebook.com" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection
+@try_manual
+def step_workspace_connection_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection delete -y '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceFeatures/get/List Workspace features
+@try_manual
+def step_workspace_feature_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-feature list '
+ '--resource-group "{rg_5}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/delete/Delete Workspace
+@try_manual
+def step_workspace_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace delete -y '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceSkus/get/List Skus
+@try_manual
+def step_workspace_sku_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-sku list',
+ checks=checks)
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
new file mode 100644
index 00000000000..66c0a7c8893
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
@@ -0,0 +1,446 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from azure.cli.testsdk import StorageAccountPreparer
+from .example_steps import step_workspace_create
+from .example_steps import step_workspace_show
+from .example_steps import step_workspace_list
+from .example_steps import step_workspace_list2
+from .example_steps import step_workspace_update
+from .example_steps import step_workspace_list_key
+from .example_steps import step_workspace_prepare_notebook
+from .example_steps import step_workspace_resync_key
+from .example_steps import step_batch_deployment_create
+from .example_steps import step_batch_deployment_show
+from .example_steps import step_batch_deployment_list
+from .example_steps import step_batch_deployment_update
+from .example_steps import step_batch_deployment_delete
+from .example_steps import step_batch_endpoint_create
+from .example_steps import step_batch_endpoint_show
+from .example_steps import step_batch_endpoint_list
+from .example_steps import step_batch_endpoint_update
+from .example_steps import step_batch_endpoint_list_key
+from .example_steps import step_batch_endpoint_delete
+from .example_steps import step_code_container_create
+from .example_steps import step_code_container_show
+from .example_steps import step_code_container_list
+from .example_steps import step_code_container_delete
+from .example_steps import step_code_version_create
+from .example_steps import step_code_version_show
+from .example_steps import step_code_version_list
+from .example_steps import step_code_version_delete
+from .example_steps import step_compute_create
+from .example_steps import step_compute_create2
+from .example_steps import step_compute_create3
+from .example_steps import step_compute_create4
+from .example_steps import step_compute_create5
+from .example_steps import step_compute_create6
+from .example_steps import step_compute_show
+from .example_steps import step_compute_show2
+from .example_steps import step_compute_show3
+from .example_steps import step_compute_list
+from .example_steps import step_compute_update
+from .example_steps import step_compute_list_node
+from .example_steps import step_compute_list_key
+from .example_steps import step_compute_restart
+from .example_steps import step_compute_start
+from .example_steps import step_compute_stop
+from .example_steps import step_compute_update_schedule
+from .example_steps import step_compute_delete
+from .example_steps import step_data_container_create
+from .example_steps import step_data_container_show
+from .example_steps import step_data_container_list
+from .example_steps import step_data_container_delete
+from .example_steps import step_datastore_create
+from .example_steps import step_datastore_create2
+from .example_steps import step_datastore_create3
+from .example_steps import step_datastore_create4
+from .example_steps import step_datastore_create5
+from .example_steps import step_datastore_create6
+from .example_steps import step_datastore_show
+from .example_steps import step_datastore_list
+from .example_steps import step_datastore_list_secret
+from .example_steps import step_data_version_create
+from .example_steps import step_data_version_show
+from .example_steps import step_data_version_list
+from .example_steps import step_data_version_delete
+from .example_steps import step_datastore_delete
+from .example_steps import step_environment_container_create
+from .example_steps import step_environment_container_show
+from .example_steps import step_environment_container_list
+from .example_steps import step_environment_container_delete
+from .example_steps import step_environment_specification_version_create
+from .example_steps import step_environment_specification_version_show
+from .example_steps import step_environment_specification_version_list
+from .example_steps import step_environment_specification_version_delete
+from .example_steps import step_job_create
+from .example_steps import step_job_create2
+from .example_steps import step_job_show
+from .example_steps import step_job_show2
+from .example_steps import step_job_list
+from .example_steps import step_job_list2
+from .example_steps import step_job_cancel
+from .example_steps import step_job_delete
+from .example_steps import step_labeling_job_create
+from .example_steps import step_labeling_job_show
+from .example_steps import step_labeling_job_list
+from .example_steps import step_labeling_job_export_label
+from .example_steps import step_labeling_job_pause
+from .example_steps import step_labeling_job_resume
+from .example_steps import step_labeling_job_delete
+from .example_steps import step_model_container_create
+from .example_steps import step_model_container_show
+from .example_steps import step_model_container_list
+from .example_steps import step_model_container_delete
+from .example_steps import step_model_version_create
+from .example_steps import step_model_version_show
+from .example_steps import step_model_version_list
+from .example_steps import step_model_version_delete
+from .example_steps import step_online_deployment_create
+from .example_steps import step_online_deployment_create2
+from .example_steps import step_online_deployment_show
+from .example_steps import step_online_deployment_show2
+from .example_steps import step_online_deployment_list
+from .example_steps import step_online_deployment_update
+from .example_steps import step_online_deployment_update2
+from .example_steps import step_online_deployment_get_log
+from .example_steps import step_online_deployment_delete
+from .example_steps import step_online_endpoint_create
+from .example_steps import step_online_endpoint_show
+from .example_steps import step_online_endpoint_list
+from .example_steps import step_online_endpoint_update
+from .example_steps import step_online_endpoint_get_token
+from .example_steps import step_online_endpoint_list_key
+from .example_steps import step_online_endpoint_regenerate_key
+from .example_steps import step_online_endpoint_delete
+from .example_steps import step_private_endpoint_connection_create
+from .example_steps import step_private_endpoint_connection_list
+from .example_steps import step_private_endpoint_connection_show
+from .example_steps import step_private_endpoint_connection_delete
+from .example_steps import step_private_link_resource_list
+from .example_steps import step_quota_list
+from .example_steps import step_quota_update
+from .example_steps import step_usage_list
+from .example_steps import step_virtual_machine_size_list
+from .example_steps import step_workspace_connection_create
+from .example_steps import step_workspace_connection_show
+from .example_steps import step_workspace_connection_list
+from .example_steps import step_workspace_connection_delete
+from .example_steps import step_workspace_feature_list
+from .example_steps import step_workspace_delete
+from .example_steps import step_workspace_sku_list
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
+
+
+# Absolute path of the directory containing this test module; used by
+# calc_coverage(__file__) to locate the coverage output next to the tests.
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7):
+    """Environment setup hook; intentionally empty — override via manual steps."""
+    pass
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7):
+    """Environment cleanup hook; intentionally empty — override via manual steps."""
+    pass
+
+
+# Testcase: Scenario
+@try_manual
+def call_scenario(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7):
+    """Drive the full generated scenario in order: create the workspace, then
+    exercise every sub-resource (batch/online endpoints and deployments, code,
+    compute, data, datastores, environments, jobs, labeling jobs, models,
+    private endpoints, quota/usage, connections) and finally delete the
+    workspace.
+
+    *test* is the ScenarioTest instance whose kwargs supply the '{placeholder}'
+    values; the rg_* arguments are the resource-group names created by the
+    preparers on the test method. Steps passed checks=[] only assert that the
+    command succeeds.
+    """
+    setup_scenario(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7)
+    step_workspace_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+                   "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "test description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+                   "ee112233445566778899aabb", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+                   "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+        test.check("encryption.status", "Enabled", case_sensitive=False),
+        test.check("friendlyName", "HelloName", case_sensitive=False),
+        test.check("hbiWorkspace", False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    # Show repeats the create-time checks to confirm the persisted state matches.
+    step_workspace_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+                   "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "test description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+                   "ee112233445566778899aabb", case_sensitive=False),
+        test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+                   "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+        test.check("encryption.status", "Enabled", case_sensitive=False),
+        test.check("friendlyName", "HelloName", case_sensitive=False),
+        test.check("hbiWorkspace", False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    step_workspace_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check('length(@)', 1),
+    ])
+    step_workspace_list2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check('length(@)', 2),
+    ])
+    step_workspace_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("location", "eastus2euap", case_sensitive=False),
+        test.check("description", "new description", case_sensitive=False),
+        test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+                   "ights/components/testinsights", case_sensitive=False),
+        test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+                   "inerRegistry/registries/testRegistry", case_sensitive=False),
+        test.check("friendlyName", "New friendly name", case_sensitive=False),
+        test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+                   "s/testkv", case_sensitive=False),
+        test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+                   "e/storageAccounts/{sa}", case_sensitive=False),
+        test.check("name", "{myWorkspace}", case_sensitive=False),
+    ])
+    # NOTE(review): the four identical calls below were generated from four
+    # distinct service examples that all map to the same CLI command.
+    step_workspace_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_prepare_notebook(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_resync_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_deployment_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_deployment_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_deployment_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_deployment_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_deployment_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_batch_endpoint_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_code_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    # Each compute create variant corresponds to a different compute example
+    # (AKS, AmlCompute, ComputeInstance, ...); all target '{myCompute}'.
+    step_compute_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_create3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_create4(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_create5(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_create6(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+        test.check("location", "eastus", case_sensitive=False),
+    ])
+    step_compute_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+    ])
+    step_compute_show3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+    ])
+    step_compute_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check('length(@)', 1),
+    ])
+    step_compute_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myCompute}", case_sensitive=False),
+    ])
+    step_compute_list_node(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_restart(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_start(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_stop(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_update_schedule(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_compute_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create3(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create4(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create5(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_create6(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_list_secret(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_data_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_datastore_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_specification_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_specification_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_specification_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_environment_specification_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_list2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_cancel(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_job_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_export_label(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_pause(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_resume(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_labeling_job_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_container_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_container_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_container_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_container_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_version_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_version_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_version_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_model_version_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_create2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_show2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_update2(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_get_log(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_deployment_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_get_token(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_list_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_regenerate_key(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_online_endpoint_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_private_endpoint_connection_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+    ])
+    step_private_endpoint_connection_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check('length(@)', 1),
+    ])
+    step_private_endpoint_connection_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[
+        test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+    ])
+    step_private_endpoint_connection_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_private_link_resource_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_quota_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_quota_update(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_usage_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_virtual_machine_size_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_connection_create(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_connection_show(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_connection_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_connection_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_feature_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_delete(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    step_workspace_sku_list(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7, checks=[])
+    cleanup_scenario(test, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7)
+
+
+# Test class for Scenario
+@try_manual
+class MachinelearningservicesScenarioTest(ScenarioTest):
+
+ def __init__(self, *args, **kwargs):
+ super(MachinelearningservicesScenarioTest, self).__init__(*args, **kwargs)
+ self.kwargs.update({
+ 'subscription_id': self.get_subscription_id()
+ })
+
+ self.kwargs.update({
+ 'myWorkspace7': 'default',
+ 'myPrivateLinkResource2': 'default',
+ 'myWorkspace3': 'demo_workspace1',
+ 'myWorkspace4': 'demo_workspace2',
+ 'myWorkspace': 'testworkspace',
+ 'myWorkspace6': 'workspace123',
+ 'myWorkspace2': 'workspaces123',
+ 'myWorkspace5': 'workspace-1',
+ 'myQuota': 'Standard_DSv2_Family_Cluster_Dedicated_vCPUs',
+ 'myCompute': 'compute123',
+ 'myPrivateEndpointConnection': '{privateEndpointConnectionName}',
+ 'myPrivateLinkResource': 'Sql',
+ 'myDatastore': 'mydatastore',
+ 'myDatastore2': 'datastore123',
+ })
+
+
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg',
+ parameter_name='rg')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2',
+ parameter_name='rg_2')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_4', parameter_name='rg_4')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_5',
+ parameter_name='rg_5')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1234'[:7], key='rg_8',
+ parameter_name='rg_8')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3',
+ parameter_name='rg_3')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_6',
+ parameter_name='rg_6')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_7',
+ parameter_name='rg_7')
+ @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa',
+ resource_group_parameter_name='rg_2')
+ def test_machinelearningservices_Scenario(self, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7):
+ call_scenario(self, rg, rg_2, rg_4, rg_5, rg_8, rg_3, rg_6, rg_7)
+ calc_coverage(__file__)
+ raise_if()
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+# Declare this directory as a namespace package (pkgutil-style) so that
+# multiple distributions can contribute modules under the same package path.
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
new file mode 100644
index 00000000000..dad2c6eeb01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+try:
+    # Apply optional handwritten customizations shipped alongside the
+    # generated SDK; absence of _patch simply means no customizations.
+    from ._patch import patch_sdk  # type: ignore
+    patch_sdk()
+except ImportError:
+    pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..c26b38d66ee
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
@@ -0,0 +1,194 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import ComputeOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import BatchEndpointsOperations
+from .operations import BatchDeploymentsOperations
+from .operations import CodeContainersOperations
+from .operations import CodeVersionsOperations
+from .operations import DataContainersOperations
+from .operations import DataVersionsOperations
+from .operations import DatastoresOperations
+from .operations import EnvironmentContainersOperations
+from .operations import EnvironmentSpecificationVersionsOperations
+from .operations import JobsOperations
+from .operations import LabelingJobsOperations
+from .operations import ModelContainersOperations
+from .operations import ModelVersionsOperations
+from .operations import OnlineEndpointsOperations
+from .operations import OnlineDeploymentsOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import WorkspaceSkusOperations
+from . import models
+
+
+class AzureMachineLearningWorkspaces(object):
+    """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+    :ivar operations: Operations operations
+    :vartype operations: azure_machine_learning_workspaces.operations.Operations
+    :ivar workspaces: WorkspacesOperations operations
+    :vartype workspaces: azure_machine_learning_workspaces.operations.WorkspacesOperations
+    :ivar usages: UsagesOperations operations
+    :vartype usages: azure_machine_learning_workspaces.operations.UsagesOperations
+    :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+    :vartype virtual_machine_sizes: azure_machine_learning_workspaces.operations.VirtualMachineSizesOperations
+    :ivar quotas: QuotasOperations operations
+    :vartype quotas: azure_machine_learning_workspaces.operations.QuotasOperations
+    :ivar compute: ComputeOperations operations
+    :vartype compute: azure_machine_learning_workspaces.operations.ComputeOperations
+    :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+    :vartype private_endpoint_connections: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionsOperations
+    :ivar private_link_resources: PrivateLinkResourcesOperations operations
+    :vartype private_link_resources: azure_machine_learning_workspaces.operations.PrivateLinkResourcesOperations
+    :ivar workspace_connections: WorkspaceConnectionsOperations operations
+    :vartype workspace_connections: azure_machine_learning_workspaces.operations.WorkspaceConnectionsOperations
+    :ivar batch_endpoints: BatchEndpointsOperations operations
+    :vartype batch_endpoints: azure_machine_learning_workspaces.operations.BatchEndpointsOperations
+    :ivar batch_deployments: BatchDeploymentsOperations operations
+    :vartype batch_deployments: azure_machine_learning_workspaces.operations.BatchDeploymentsOperations
+    :ivar code_containers: CodeContainersOperations operations
+    :vartype code_containers: azure_machine_learning_workspaces.operations.CodeContainersOperations
+    :ivar code_versions: CodeVersionsOperations operations
+    :vartype code_versions: azure_machine_learning_workspaces.operations.CodeVersionsOperations
+    :ivar data_containers: DataContainersOperations operations
+    :vartype data_containers: azure_machine_learning_workspaces.operations.DataContainersOperations
+    :ivar data_versions: DataVersionsOperations operations
+    :vartype data_versions: azure_machine_learning_workspaces.operations.DataVersionsOperations
+    :ivar datastores: DatastoresOperations operations
+    :vartype datastores: azure_machine_learning_workspaces.operations.DatastoresOperations
+    :ivar environment_containers: EnvironmentContainersOperations operations
+    :vartype environment_containers: azure_machine_learning_workspaces.operations.EnvironmentContainersOperations
+    :ivar environment_specification_versions: EnvironmentSpecificationVersionsOperations operations
+    :vartype environment_specification_versions: azure_machine_learning_workspaces.operations.EnvironmentSpecificationVersionsOperations
+    :ivar jobs: JobsOperations operations
+    :vartype jobs: azure_machine_learning_workspaces.operations.JobsOperations
+    :ivar labeling_jobs: LabelingJobsOperations operations
+    :vartype labeling_jobs: azure_machine_learning_workspaces.operations.LabelingJobsOperations
+    :ivar model_containers: ModelContainersOperations operations
+    :vartype model_containers: azure_machine_learning_workspaces.operations.ModelContainersOperations
+    :ivar model_versions: ModelVersionsOperations operations
+    :vartype model_versions: azure_machine_learning_workspaces.operations.ModelVersionsOperations
+    :ivar online_endpoints: OnlineEndpointsOperations operations
+    :vartype online_endpoints: azure_machine_learning_workspaces.operations.OnlineEndpointsOperations
+    :ivar online_deployments: OnlineDeploymentsOperations operations
+    :vartype online_deployments: azure_machine_learning_workspaces.operations.OnlineDeploymentsOperations
+    :ivar workspace_features: WorkspaceFeaturesOperations operations
+    :vartype workspace_features: azure_machine_learning_workspaces.operations.WorkspaceFeaturesOperations
+    :ivar workspace_skus: WorkspaceSkusOperations operations
+    :vartype workspace_skus: azure_machine_learning_workspaces.operations.WorkspaceSkusOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param subscription_id: The ID of the target subscription.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        subscription_id,  # type: str
+        base_url=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        # Fall back to the public Azure Resource Manager endpoint when the
+        # caller supplies no sovereign/custom cloud URL.
+        if not base_url:
+            base_url = 'https://management.azure.com'
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        # Register every generated model class with the (de)serializers so
+        # type names referenced in REST payloads can be resolved.
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        # One attribute per operation group; all groups share the same
+        # pipeline client, configuration and serializers.
+        self.operations = Operations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspaces = WorkspacesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.usages = UsagesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.virtual_machine_sizes = VirtualMachineSizesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.quotas = QuotasOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.compute = ComputeOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_link_resources = PrivateLinkResourcesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_connections = WorkspaceConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.batch_endpoints = BatchEndpointsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.batch_deployments = BatchDeploymentsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_containers = CodeContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_versions = CodeVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_containers = DataContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_versions = DataVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.datastores = DatastoresOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_containers = EnvironmentContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_specification_versions = EnvironmentSpecificationVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.jobs = JobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.labeling_jobs = LabelingJobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_containers = ModelContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_versions = ModelVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_endpoints = OnlineEndpointsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_deployments = OnlineDeploymentsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_features = WorkspaceFeaturesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_skus = WorkspaceSkusOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    def close(self):
+        # type: () -> None
+        """Close the underlying HTTP pipeline and release its resources."""
+        self._client.close()
+
+    def __enter__(self):
+        # type: () -> AzureMachineLearningWorkspaces
+        """Enter the underlying pipeline's context manager and return this client."""
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        """Delegate context-manager exit (cleanup) to the underlying pipeline."""
+        self._client.__exit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
new file mode 100644
index 00000000000..4f4f57b5855
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param subscription_id: The ID of the target subscription.
+    :type subscription_id: str
+    :raises ValueError: If ``credential`` or ``subscription_id`` is None.
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        subscription_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+        if subscription_id is None:
+            raise ValueError("Parameter 'subscription_id' must not be None.")
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+        self.credential = credential
+        self.subscription_id = subscription_id
+        # Service API version sent with every request from this client.
+        self.api_version = "2021-03-01-preview"
+        # AAD scopes requested for the bearer token; callers targeting a
+        # sovereign cloud can override via the 'credential_scopes' keyword.
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        # Build each pipeline policy, honoring any caller-supplied override
+        # passed through kwargs.
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        # Default to AAD bearer-token authentication when a credential is
+        # present and no explicit authentication policy was provided.
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
new file mode 100644
index 00000000000..872474577c4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..4c94967cd1d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,188 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import ComputeOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import BatchEndpointsOperations
+from .operations import BatchDeploymentsOperations
+from .operations import CodeContainersOperations
+from .operations import CodeVersionsOperations
+from .operations import DataContainersOperations
+from .operations import DataVersionsOperations
+from .operations import DatastoresOperations
+from .operations import EnvironmentContainersOperations
+from .operations import EnvironmentSpecificationVersionsOperations
+from .operations import JobsOperations
+from .operations import LabelingJobsOperations
+from .operations import ModelContainersOperations
+from .operations import ModelVersionsOperations
+from .operations import OnlineEndpointsOperations
+from .operations import OnlineDeploymentsOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import WorkspaceSkusOperations
+from .. import models
+
+
+class AzureMachineLearningWorkspaces(object):
+    """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+    :ivar operations: Operations operations
+    :vartype operations: azure_machine_learning_workspaces.aio.operations.Operations
+    :ivar workspaces: WorkspacesOperations operations
+    :vartype workspaces: azure_machine_learning_workspaces.aio.operations.WorkspacesOperations
+    :ivar usages: UsagesOperations operations
+    :vartype usages: azure_machine_learning_workspaces.aio.operations.UsagesOperations
+    :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+    :vartype virtual_machine_sizes: azure_machine_learning_workspaces.aio.operations.VirtualMachineSizesOperations
+    :ivar quotas: QuotasOperations operations
+    :vartype quotas: azure_machine_learning_workspaces.aio.operations.QuotasOperations
+    :ivar compute: ComputeOperations operations
+    :vartype compute: azure_machine_learning_workspaces.aio.operations.ComputeOperations
+    :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+    :vartype private_endpoint_connections: azure_machine_learning_workspaces.aio.operations.PrivateEndpointConnectionsOperations
+    :ivar private_link_resources: PrivateLinkResourcesOperations operations
+    :vartype private_link_resources: azure_machine_learning_workspaces.aio.operations.PrivateLinkResourcesOperations
+    :ivar workspace_connections: WorkspaceConnectionsOperations operations
+    :vartype workspace_connections: azure_machine_learning_workspaces.aio.operations.WorkspaceConnectionsOperations
+    :ivar batch_endpoints: BatchEndpointsOperations operations
+    :vartype batch_endpoints: azure_machine_learning_workspaces.aio.operations.BatchEndpointsOperations
+    :ivar batch_deployments: BatchDeploymentsOperations operations
+    :vartype batch_deployments: azure_machine_learning_workspaces.aio.operations.BatchDeploymentsOperations
+    :ivar code_containers: CodeContainersOperations operations
+    :vartype code_containers: azure_machine_learning_workspaces.aio.operations.CodeContainersOperations
+    :ivar code_versions: CodeVersionsOperations operations
+    :vartype code_versions: azure_machine_learning_workspaces.aio.operations.CodeVersionsOperations
+    :ivar data_containers: DataContainersOperations operations
+    :vartype data_containers: azure_machine_learning_workspaces.aio.operations.DataContainersOperations
+    :ivar data_versions: DataVersionsOperations operations
+    :vartype data_versions: azure_machine_learning_workspaces.aio.operations.DataVersionsOperations
+    :ivar datastores: DatastoresOperations operations
+    :vartype datastores: azure_machine_learning_workspaces.aio.operations.DatastoresOperations
+    :ivar environment_containers: EnvironmentContainersOperations operations
+    :vartype environment_containers: azure_machine_learning_workspaces.aio.operations.EnvironmentContainersOperations
+    :ivar environment_specification_versions: EnvironmentSpecificationVersionsOperations operations
+    :vartype environment_specification_versions: azure_machine_learning_workspaces.aio.operations.EnvironmentSpecificationVersionsOperations
+    :ivar jobs: JobsOperations operations
+    :vartype jobs: azure_machine_learning_workspaces.aio.operations.JobsOperations
+    :ivar labeling_jobs: LabelingJobsOperations operations
+    :vartype labeling_jobs: azure_machine_learning_workspaces.aio.operations.LabelingJobsOperations
+    :ivar model_containers: ModelContainersOperations operations
+    :vartype model_containers: azure_machine_learning_workspaces.aio.operations.ModelContainersOperations
+    :ivar model_versions: ModelVersionsOperations operations
+    :vartype model_versions: azure_machine_learning_workspaces.aio.operations.ModelVersionsOperations
+    :ivar online_endpoints: OnlineEndpointsOperations operations
+    :vartype online_endpoints: azure_machine_learning_workspaces.aio.operations.OnlineEndpointsOperations
+    :ivar online_deployments: OnlineDeploymentsOperations operations
+    :vartype online_deployments: azure_machine_learning_workspaces.aio.operations.OnlineDeploymentsOperations
+    :ivar workspace_features: WorkspaceFeaturesOperations operations
+    :vartype workspace_features: azure_machine_learning_workspaces.aio.operations.WorkspaceFeaturesOperations
+    :ivar workspace_skus: WorkspaceSkusOperations operations
+    :vartype workspace_skus: azure_machine_learning_workspaces.aio.operations.WorkspaceSkusOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param subscription_id: The ID of the target subscription.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        subscription_id: str,
+        base_url: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        # Fall back to the public Azure Resource Manager endpoint when the
+        # caller supplies no sovereign/custom cloud URL.
+        if not base_url:
+            base_url = 'https://management.azure.com'
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        # Register every generated model class with the (de)serializers so
+        # type names referenced in REST payloads can be resolved.
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        # One attribute per operation group; all groups share the same async
+        # pipeline client, configuration and serializers.
+        self.operations = Operations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspaces = WorkspacesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.usages = UsagesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.virtual_machine_sizes = VirtualMachineSizesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.quotas = QuotasOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.compute = ComputeOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.private_link_resources = PrivateLinkResourcesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_connections = WorkspaceConnectionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.batch_endpoints = BatchEndpointsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.batch_deployments = BatchDeploymentsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_containers = CodeContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.code_versions = CodeVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_containers = DataContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.data_versions = DataVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.datastores = DatastoresOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_containers = EnvironmentContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.environment_specification_versions = EnvironmentSpecificationVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.jobs = JobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.labeling_jobs = LabelingJobsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_containers = ModelContainersOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.model_versions = ModelVersionsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_endpoints = OnlineEndpointsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.online_deployments = OnlineDeploymentsOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_features = WorkspaceFeaturesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_skus = WorkspaceSkusOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    async def close(self) -> None:
+        """Close the underlying async HTTP pipeline and release its resources."""
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+        """Enter the underlying pipeline's async context manager and return this client."""
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        """Delegate async context-manager exit (cleanup) to the underlying pipeline."""
+        await self._client.__aexit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
new file mode 100644
index 00000000000..ce08b530c37
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param subscription_id: The ID of the target subscription.
+    :type subscription_id: str
+    :raises ValueError: If ``credential`` or ``subscription_id`` is None.
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        subscription_id: str,
+        **kwargs: Any
+    ) -> None:
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+        if subscription_id is None:
+            raise ValueError("Parameter 'subscription_id' must not be None.")
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+        self.credential = credential
+        self.subscription_id = subscription_id
+        # Service API version sent with every request from this client.
+        self.api_version = "2021-03-01-preview"
+        # AAD scopes requested for the bearer token; callers targeting a
+        # sovereign cloud can override via the 'credential_scopes' keyword.
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        # Build each pipeline policy, honoring any caller-supplied override
+        # passed through kwargs; retry/redirect/auth use the async variants.
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        # Default to AAD bearer-token authentication when a credential is
+        # present and no explicit authentication policy was provided.
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
new file mode 100644
index 00000000000..5aa4d95e2b4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._compute_operations import ComputeOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._batch_endpoints_operations import BatchEndpointsOperations
+from ._batch_deployments_operations import BatchDeploymentsOperations
+from ._code_containers_operations import CodeContainersOperations
+from ._code_versions_operations import CodeVersionsOperations
+from ._data_containers_operations import DataContainersOperations
+from ._data_versions_operations import DataVersionsOperations
+from ._datastores_operations import DatastoresOperations
+from ._environment_containers_operations import EnvironmentContainersOperations
+from ._environment_specification_versions_operations import EnvironmentSpecificationVersionsOperations
+from ._jobs_operations import JobsOperations
+from ._labeling_jobs_operations import LabelingJobsOperations
+from ._model_containers_operations import ModelContainersOperations
+from ._model_versions_operations import ModelVersionsOperations
+from ._online_endpoints_operations import OnlineEndpointsOperations
+from ._online_deployments_operations import OnlineDeploymentsOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._workspace_skus_operations import WorkspaceSkusOperations
+
+__all__ = [  # public surface of the aio operations package; mirrors the imports above one-to-one
+    'Operations',
+    'WorkspacesOperations',
+    'UsagesOperations',
+    'VirtualMachineSizesOperations',
+    'QuotasOperations',
+    'ComputeOperations',
+    'PrivateEndpointConnectionsOperations',
+    'PrivateLinkResourcesOperations',
+    'WorkspaceConnectionsOperations',
+    'BatchEndpointsOperations',
+    'BatchDeploymentsOperations',
+    'CodeContainersOperations',
+    'CodeVersionsOperations',
+    'DataContainersOperations',
+    'DataVersionsOperations',
+    'DatastoresOperations',
+    'EnvironmentContainersOperations',
+    'EnvironmentSpecificationVersionsOperations',
+    'JobsOperations',
+    'LabelingJobsOperations',
+    'ModelContainersOperations',
+    'ModelVersionsOperations',
+    'OnlineEndpointsOperations',
+    'OnlineDeploymentsOperations',
+    'WorkspaceFeaturesOperations',
+    'WorkspaceSkusOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_deployments_operations.py
new file mode 100644
index 00000000000..963ccde8ea4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_deployments_operations.py
@@ -0,0 +1,431 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class BatchDeploymentsOperations:
+ """BatchDeploymentsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send HTTP requests
+        self._serialize = serializer  # serializer for URL parts, query/header params and request bodies
+        self._deserialize = deserializer  # deserializer for response bodies into model classes
+        self._config = config  # client configuration (subscription_id, policies, ...)
+
+ def list(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ order_by: Optional[str] = None,
+ top: Optional[int] = None,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.BatchDeploymentTrackedResourceArmPaginatedResult"]:
+ """Lists Batch inference deployments in the workspace.
+
+ Lists Batch inference deployments in the workspace.
+
+ :param endpoint_name: Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Top of list.
+ :type top: int
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.BatchDeploymentTrackedResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('BatchDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments'} # type: ignore
+
+    async def delete(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete Batch Inference deployment.
+
+        Delete Batch Inference deployment.
+
+        :param endpoint_name: Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference deployment identifier.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # callers may extend/override the status -> exception mapping
+        api_version = "2021-03-01-preview"  # preview API version this vendored SDK was generated against
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # 204 = already absent; treated as success
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def get(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.BatchDeploymentTrackedResource":
+        """Gets a batch inference deployment by id.
+
+        Gets a batch inference deployment by id.
+
+        :param endpoint_name: Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The identifier for the Batch deployments.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.BatchDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # callers may extend/override the status -> exception mapping
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # custom cls gets response, model and (empty) response headers
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialBatchDeploymentPartialTrackedResource",
+        **kwargs
+    ) -> "models.BatchDeploymentTrackedResource":
+        """Update a batch inference deployment.
+
+        Update a batch inference deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The identifier for the Batch inference deployment.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Batch inference deployment definition object.
+        :type body: ~azure_machine_learning_workspaces.models.PartialBatchDeploymentPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.BatchDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # callers may extend/override the status -> exception mapping
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),  # NOTE(review): get/delete skip this pattern check — generator inconsistency, confirm against service swagger
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialBatchDeploymentPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)  # PATCH: partial update of the tracked resource
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.BatchDeploymentTrackedResource",
+        **kwargs
+    ) -> "models.BatchDeploymentTrackedResource":
+        """Creates/updates a batch inference deployment.
+
+        Creates/updates a batch inference deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The identifier for the Batch inference deployment.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Batch inference deployment definition object.
+        :type body: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.BatchDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # callers may extend/override the status -> exception mapping
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'BatchDeploymentTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)  # PUT: full create-or-replace
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:  # 200 = updated existing, 201 = created new
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 201:  # generated branches are identical; kept as emitted by AutoRest
+            deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_endpoints_operations.py
new file mode 100644
index 00000000000..c76db294eb0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_batch_endpoints_operations.py
@@ -0,0 +1,471 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class BatchEndpointsOperations:
+ """BatchEndpointsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send HTTP requests
+        self._serialize = serializer  # serializer for URL parts, query/header params and request bodies
+        self._deserialize = deserializer  # deserializer for response bodies into model classes
+        self._config = config  # client configuration (subscription_id, policies, ...)
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ count: Optional[int] = None,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.BatchEndpointTrackedResourceArmPaginatedResult"]:
+ """Lists Batch inference endpoint in the workspace.
+
+ Lists Batch inference endpoint in the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param count: Number of endpoints to be retrieved in a page of results.
+ :type count: int
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.BatchEndpointTrackedResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.BatchEndpointTrackedResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if count is not None:
+ query_parameters['count'] = self._serialize.query("count", count, 'int')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('BatchEndpointTrackedResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints'} # type: ignore
+
+    async def delete(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete Batch Inference Endpoint.
+
+        Delete Batch Inference Endpoint.
+
+        :param endpoint_name: Inference Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # callers may extend/override the status -> exception mapping
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # 204 = already absent; treated as success
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} # type: ignore
+
+ async def get(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.BatchEndpointTrackedResource":
+ """Gets a batch inference endpoint by name.
+
+ Gets a batch inference endpoint by name.
+
+ :param endpoint_name: Name for the Batch Endpoint.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: BatchEndpointTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.BatchEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} # type: ignore
+
+ async def update(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.PartialBatchEndpointPartialTrackedResource",
+ **kwargs
+ ) -> "models.BatchEndpointTrackedResource":
+ """Update a batch inference endpoint.
+
+ Update a batch inference endpoint.
+
+ :param endpoint_name: Name for the Batch inference endpoint.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Mutable batch inference endpoint definition object.
+ :type body: ~azure_machine_learning_workspaces.models.PartialBatchEndpointPartialTrackedResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: BatchEndpointTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.BatchEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'PartialBatchEndpointPartialTrackedResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.BatchEndpointTrackedResource",
+ **kwargs
+ ) -> "models.BatchEndpointTrackedResource":
+ """Creates a batch inference endpoint.
+
+ Creates a batch inference endpoint.
+
+ :param endpoint_name: Name for the Batch inference endpoint.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Batch inference endpoint definition object.
+ :type body: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: BatchEndpointTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.BatchEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'BatchEndpointTrackedResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} # type: ignore
+
+ async def list_keys(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.EndpointAuthKeys":
+ """Lists batch Inference Endpoint keys.
+
+ Lists batch Inference Endpoint keys.
+
+ :param endpoint_name: Inference Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: EndpointAuthKeys, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthKeys"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py
new file mode 100644
index 00000000000..f4a0f202b70
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class CodeContainersOperations:
    """CodeContainersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Class-level alias so callers can reach the model classes for this
    # operation group without importing the models package themselves.
    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        workspace_name: str,
        skip: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["models.CodeContainerResourceArmPaginatedResult"]:
        """List containers.

        List containers.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.CodeContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResourceArmPaginatedResult"]
        # Map well-known ARM status codes to typed exceptions; callers may
        # extend the mapping via the 'error_map' keyword argument.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the request for either the first page (URL template +
            # query parameters) or a continuation page (opaque next_link URL).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a full URL from the service and already carries
                # its own query string, so no parameters are added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('CodeContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page; raises on any non-200 response.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): unlike the non-paged operations, the error body
                # is deserialized before map_error, whose typed exception (if
                # raised) does not carry the parsed model — generated pattern,
                # confirm against the AutoRest paging template if changing.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # Lazy pager: nothing is requested until the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes'}  # type: ignore

    async def delete(
        self,
        name: str,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> None:
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the container was already absent; both outcomes succeed.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore

    async def get(
        self,
        name: str,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> "models.CodeContainerResource":
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore

    async def create_or_update(
        self,
        name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.CodeContainerResource",
        **kwargs
    ) -> "models.CodeContainerResource":
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Name is pattern-validated only on the write path.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CodeContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both return the same resource shape.
        if response.status_code == 200:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py
new file mode 100644
index 00000000000..ce8613c7534
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_code_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class CodeVersionsOperations:
    """CodeVersionsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Alias the generated models module on the class so callers can reach the
    # model types through the operation group itself.
    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client  # pipeline client used to build and send requests
        self._serialize = serializer  # serializes URL/query/header/body values
        self._deserialize = deserializer  # maps HTTP responses back to model types
        self._config = config  # carries subscription_id and client settings

    def list(
        self,
        name: str,
        resource_group_name: str,
        workspace_name: str,
        order_by: Optional[str] = None,
        top: Optional[int] = None,
        skip: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["models.CodeVersionResourceArmPaginatedResult"]:
        """List versions.

        List versions.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param order_by: Ordering of list.
        :type order_by: str
        :param top: Maximum number of records to return.
        :type top: int
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.CodeVersionResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds either the first-page request (from operation metadata) or a
            # follow-up request from the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: format the templated URL and attach api-version plus
                # the optional $orderBy/$top/$skip query options.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'name': self._serialize.url("name", name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if order_by is not None:
                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: next_link is a complete URL, so no query
                # parameters are re-attached.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('CodeVersionResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Error body is deserialized before status-code mapping so the
                # typed error model is available on the raised exception.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions'}  # type: ignore

    async def delete(
        self,
        name: str,
        version: str,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> None:
        """Delete version.

        Delete version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Both 200 and 204 are treated as successful deletion responses.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore

    async def get(
        self,
        name: str,
        version: str,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> "models.CodeVersionResource":
        """Get version.

        Get version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore

    async def create_or_update(
        self,
        name: str,
        version: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.CodeVersionResource",
        **kwargs
    ) -> "models.CodeVersionResource":
        """Create or update version.

        Create or update version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Version entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Unlike get/delete, create validates the container name against the
            # service naming pattern client-side before sending the request.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CodeVersionResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the same resource payload.
        if response.status_code == 200:
            deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py
new file mode 100644
index 00000000000..1f13b45d892
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py
@@ -0,0 +1,1097 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ComputeOperations:
+ """ComputeOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
    def list(
        self,
        resource_group_name: str,
        workspace_name: str,
        skip: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["models.PaginatedComputeResourcesList"]:
        """Gets computes in specified workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedComputeResourcesList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds either the first-page request (from operation metadata) or a
            # follow-up request from the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: format the templated URL and attach api-version plus
                # the optional $skip continuation token.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: next_link is a complete URL, so no query
                # parameters are re-attached.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Error body is deserialized before status-code mapping so the
                # typed error model is available on the raised exception.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'}  # type: ignore
+
    async def get(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> "models.ComputeResource":
        """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
        not returned - use 'keys' nested resource to get them.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComputeResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ComputeResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        parameters: "models.ComputeResource",
        **kwargs
    ) -> "models.ComputeResource":
        """Send the initial PUT for the create-or-update long-running operation.

        Called by ``begin_create_or_update``; returns the first response, which is
        then polled to completion by the LRO machinery.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :param workspace_name: Name of Azure Machine Learning workspace.
        :param compute_name: Name of the Azure Machine Learning compute.
        :param parameters: Payload with Machine Learning compute definition.
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ComputeResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 200:
            deserialized = self._deserialize('ComputeResource', pipeline_response)

        if response.status_code == 201:
            # A 201 response carries the Azure-AsyncOperation header that the LRO
            # poller uses to track operation status.
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('ComputeResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        parameters: "models.ComputeResource",
        **kwargs
    ) -> AsyncLROPoller["models.ComputeResource"]:
        """Creates or updates compute. This call will overwrite a compute if it exists. This is a
        nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
        that it does not exist yet.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param parameters: Payload with Machine Learning compute definition.
        :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Fresh operation: send the initial PUT. ``cls=lambda x,y,z: x`` keeps
            # the raw pipeline response so the poller can inspect headers/status.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                compute_name=compute_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs applied only to the initial request; drop them so they are
        # not replayed on every polling request.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserializes the final polling response into the resource model.
            response_headers = {}
            response = pipeline_response.http_response
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('ComputeResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, response_headers)
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }

        # polling=True -> standard ARM polling; False -> return immediately;
        # anything else is treated as a caller-supplied polling method.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously-started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def _update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        parameters: "models.ClusterUpdateParameters",
        **kwargs
    ) -> "models.ComputeResource":
        """Send the initial PATCH for the update long-running operation.

        Called by ``begin_update``; returns the first response, which is then
        polled to completion by the LRO machinery.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :param workspace_name: Name of Azure Machine Learning workspace.
        :param compute_name: Name of the Azure Machine Learning compute.
        :param parameters: Additional parameters for cluster update.
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
        body_content_kwargs['content'] = body_content
        # Update uses PATCH (partial update), unlike create_or_update's PUT.
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ComputeResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        parameters: "models.ClusterUpdateParameters",
        **kwargs
    ) -> AsyncLROPoller["models.ComputeResource"]:
        """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
        nonrecoverable operation.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param parameters: Additional parameters for cluster update.
        :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # 'polling' may be True (default ARM polling), False (no polling) or a
        # caller-supplied AsyncPollingMethod instance.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x keeps the raw PipelineResponse so the polling
            # method can drive the LRO; deserialization happens in
            # get_long_running_output below.
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                compute_name=compute_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request; drop them so they are
        # not forwarded into the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final-state handler: deserialize the terminal response body.
            deserialized = self._deserialize('ComputeResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # Forwarded to the polling method so it can format polling URLs that
        # reference the original request path.
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def _delete_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
        **kwargs
    ) -> None:
        """Send the initial DELETE request of the compute-delete long-running operation.

        Internal helper for :meth:`begin_delete`. Accepts 200 (completed
        synchronously) or 202 (accepted; operation continues asynchronously) and
        returns ``None`` unless a ``cls`` callback is supplied.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters; underlyingResourceAction selects Delete vs Detach.
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # On 202 capture the async-operation tracking headers; on 200 the headers
        # dict passed to cls stays empty.
        response_headers = {}
        if response.status_code == 202:
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))

        if cls:
            return cls(pipeline_response, None, response_headers)

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    async def begin_delete(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes specified Machine Learning compute.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
         underlying compute from workspace if 'Detach'.
        :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # 'polling' may be True (default ARM polling), False (no polling) or a
        # caller-supplied AsyncPollingMethod instance.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x keeps the raw PipelineResponse for the poller.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                compute_name=compute_name,
                underlying_resource_action=underlying_resource_action,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request; do not forward them
        # into the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete has no response body; only invoke the optional cls hook
            # (returns None otherwise).
            if cls:
                return cls(pipeline_response, None, {})

        # Forwarded to the polling method so it can format polling URLs that
        # reference the original request path.
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
    def list_nodes(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> AsyncIterable["models.AmlComputeNodesInformation"]:
        """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AmlComputeNodesInformation"]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers (shared by first and subsequent pages).
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: POST to the listNodes action URL.
                url = self.list_nodes.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                    'computeName': self._serialize.url("compute_name", compute_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct query parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.post(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: GET the opaque nextLink returned by the service.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Split one page into (next-page link, items on this page).
            deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
            list_of_elem = deserialized.nodes
            if cls:
                # NOTE: the paging cls hook receives only the element list, not the
                # (response, value, headers) triple used by non-paging operations.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): unlike the non-paging operations, the error body is
                # deserialized before map_error, so mapped status codes raise the
                # typed exception without the model attached — this matches the
                # autorest paging template; confirm before changing.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # Lazy async iterator; no request is sent until iteration starts.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'}  # type: ignore
+
    async def list_keys(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> "models.ComputeSecrets":
        """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ComputeSecrets, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeSecrets"]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.list_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # listKeys is a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ComputeSecrets', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'}  # type: ignore
+
    async def _start_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> None:
        """Send the initial POST of the compute-instance start long-running operation.

        Internal helper for :meth:`begin_start`. Only 202 (accepted) is a success
        status; returns ``None`` unless a ``cls`` callback is supplied.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self._start_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # start is a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'}  # type: ignore
+
    async def begin_start(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Posts a start action to a compute instance.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # 'polling' may be True (default ARM polling), False (no polling) or a
        # caller-supplied AsyncPollingMethod instance.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x keeps the raw PipelineResponse for the poller.
            raw_result = await self._start_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                compute_name=compute_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request; do not forward them
        # into the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Start has no response body; only invoke the optional cls hook
            # (returns None otherwise).
            if cls:
                return cls(pipeline_response, None, {})

        # Forwarded to the polling method so it can format polling URLs that
        # reference the original request path.
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'}  # type: ignore
+
    async def _stop_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> None:
        """Send the initial POST of the compute-instance stop long-running operation.

        Internal helper for :meth:`begin_stop`. Only 202 (accepted) is a success
        status; returns ``None`` unless a ``cls`` callback is supplied.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self._stop_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # stop is a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'}  # type: ignore
+
    async def begin_stop(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Posts a stop action to a compute instance.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # 'polling' may be True (default ARM polling), False (no polling) or a
        # caller-supplied AsyncPollingMethod instance.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x keeps the raw PipelineResponse for the poller.
            raw_result = await self._stop_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                compute_name=compute_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request; do not forward them
        # into the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Stop has no response body; only invoke the optional cls hook
            # (returns None otherwise).
            if cls:
                return cls(pipeline_response, None, {})

        # Forwarded to the polling method so it can format polling URLs that
        # reference the original request path.
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'}  # type: ignore
+
    async def restart(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        **kwargs
    ) -> None:
        """Posts a restart action to a compute instance.

        Unlike start/stop this is not exposed as a long-running operation: the
        call succeeds only on an immediate 200 response and returns no body.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.restart.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # restart is a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'}  # type: ignore
+
    async def update_schedules(
        self,
        resource_group_name: str,
        workspace_name: str,
        compute_name: str,
        parameters: Optional["models.ComputeSchedules"] = None,
        **kwargs
    ) -> None:
        """Updates schedules of a compute instance.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param parameters: The object for updating schedules of specified ComputeInstance. Optional;
         when omitted the POST is sent with an empty (None) body.
        :type parameters: ~azure_machine_learning_workspaces.models.ComputeSchedules
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known ARM status codes to typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.update_schedules.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the optional body; None is passed through when no schedules
        # object was provided.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if parameters is not None:
            body_content = self._serialize.body(parameters, 'ComputeSchedules')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    update_schedules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateSchedules'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py
new file mode 100644
index 00000000000..d91eb25aefd
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataContainersOperations:
+    """DataContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # msrest serializer for URL/query/header/body values
+        self._deserialize = deserializer  # msrest deserializer for response payloads
+        self._config = config  # client configuration (provides subscription_id)
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.DataContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DataContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers (shared by the first page and next_link pages)
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: construct URL from the operation's metadata template
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct query string parameters (api-version is mandatory, $skip optional)
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link already contains the full URL and query string
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('DataContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE: error body is deserialized before map_error here (pager variant of the generated pattern)
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the operation's metadata template and path arguments
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # both 200 and 204 count as successful delete
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DataContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the operation's metadata template and path arguments
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DataContainerResource",
+        **kwargs
+    ) -> "models.DataContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL; name is validated against the service naming pattern only on PUT
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers (JSON request body and response)
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DataContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:  # 200 = updated, 201 = created
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            # same payload shape for both success codes; branches kept separate by the generator
+            deserialized = self._deserialize('DataContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py
new file mode 100644
index 00000000000..a1eea0a2d42
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_data_versions_operations.py
@@ -0,0 +1,360 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataVersionsOperations:
+    """DataVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # msrest serializer for URL/query/header/body values
+        self._deserialize = deserializer  # msrest deserializer for response payloads
+        self._config = config  # client configuration (provides subscription_id)
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skip: Optional[str] = None,
+        tags: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.DataVersionResourceArmPaginatedResult"]:
+        """List data versions.
+
+        List data versions.
+
+        :param name: Data name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2.
+        :type tags: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DataVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DataVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers (shared by the first page and next_link pages)
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: construct URL from the operation's metadata template
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct query string parameters; all filters are optional except api-version
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if tags is not None:
+                    query_parameters['$tags'] = self._serialize.query("tags", tags, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link already contains the full URL and query string
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('DataVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE: error body is deserialized before map_error here (pager variant of the generated pattern)
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'}  # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the operation's metadata template and path arguments
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # both 200 and 204 count as successful delete
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DataVersionResource":
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the operation's metadata template and path arguments
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DataVersionResource",
+        **kwargs
+    ) -> "models.DataVersionResource":
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL; name is validated against the service naming pattern only on PUT
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query string parameters (api-version is mandatory)
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct HTTP headers (JSON request body and response)
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DataVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:  # 200 = updated, 201 = created
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            # same payload shape for both success codes; branches kept separate by the generator
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py
new file mode 100644
index 00000000000..aaf16667e3d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_datastores_operations.py
@@ -0,0 +1,428 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DatastoresOperations:
+    # AutoRest-generated (vendored) async client code; per the module header,
+    # manual changes may be lost when the client is regenerated.
+    """DatastoresOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client and msrest (de)serializers shared by every operation below.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        count: Optional[int] = 30,
+        is_default: Optional[bool] = None,
+        names: Optional[List[str]] = None,
+        search_text: Optional[str] = None,
+        order_by: Optional[str] = None,
+        order_by_asc: Optional[bool] = False,
+        **kwargs
+    ) -> AsyncIterable["models.DatastorePropertiesResourceArmPaginatedResult"]:
+        """List datastores.
+
+        List datastores.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param count: Maximum number of results to return.
+        :type count: int
+        :param is_default: Filter down to the workspace default datastore.
+        :type is_default: bool
+        :param names: Names of datastores to return.
+        :type names: list[str]
+        :param search_text: Text to search for in the datastore names.
+        :type search_text: str
+        :param order_by: Order by property (createdtime | modifiedtime | name).
+        :type order_by: str
+        :param order_by_asc: Order by property in ascending order.
+        :type order_by_asc: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DatastorePropertiesResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.DatastorePropertiesResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # First page is built from the operation metadata; subsequent pages reuse the
+        # service-provided next_link verbatim (it already embeds all query parameters).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                # count=30 and order_by_asc=False are client-side defaults, so these two
+                # parameters are sent unless the caller explicitly passes None.
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+                if is_default is not None:
+                    query_parameters['isDefault'] = self._serialize.query("is_default", is_default, 'bool')
+                if names is not None:
+                    query_parameters['names'] = self._serialize.query("names", names, '[str]')
+                if search_text is not None:
+                    query_parameters['searchText'] = self._serialize.query("search_text", search_text, 'str')
+                if order_by is not None:
+                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if order_by_asc is not None:
+                    query_parameters['orderByAsc'] = self._serialize.query("order_by_asc", order_by_asc, 'bool')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('DatastorePropertiesResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the error model is deserialized before map_error; for mapped
+                # status codes (401/404/409) map_error raises first and the model is unused.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores'} # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete datastore.
+
+        Delete datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as a successful delete.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DatastorePropertiesResource":
+        """Get datastore.
+
+        Get datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DatastorePropertiesResource",
+        skip_validation: Optional[bool] = False,
+        **kwargs
+    ) -> "models.DatastorePropertiesResource":
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Datastore entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :param skip_validation: Flag to skip validation.
+        :type skip_validation: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            # The datastore name is validated client-side against the naming pattern on
+            # this create/update path only; get/delete pass the name through unchecked.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        # skip_validation defaults to False, so skipValidation is sent unless the
+        # caller explicitly passes None.
+        if skip_validation is not None:
+            query_parameters['skipValidation'] = self._serialize.query("skip_validation", skip_validation, 'bool')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DatastorePropertiesResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 and 201 responses carry a DatastorePropertiesResource body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} # type: ignore
+
+    async def list_secrets(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.DatastoreSecrets":
+        """Get datastore secrets.
+
+        Get datastore secrets.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastoreSecrets, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastoreSecrets
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DatastoreSecrets"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_secrets.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # listSecrets is invoked as a POST action (URL suffix /listSecrets), not a GET.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastoreSecrets', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py
new file mode 100644
index 00000000000..fae8848ad0a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_containers_operations.py
@@ -0,0 +1,328 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentContainersOperations:
+    # AutoRest-generated (vendored) async client code; per the module header,
+    # manual changes may be lost when the client is regenerated.
+    """EnvironmentContainersOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client and msrest (de)serializers shared by every operation below.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.EnvironmentContainerResourceArmPaginatedResult"]:
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.EnvironmentContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # First page is built from the operation metadata; subsequent pages reuse the
+        # service-provided next_link verbatim (it already embeds all query parameters).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the error model is deserialized before map_error; for mapped
+                # status codes (401/404/409) map_error raises first and the model is unused.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments'} # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are accepted as a successful delete.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EnvironmentContainerResource":
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.EnvironmentContainerResource",
+        **kwargs
+    ) -> "models.EnvironmentContainerResource":
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            # The container name is validated client-side against the naming pattern on
+            # this create/update path only; get/delete pass the name through unchecked.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 and 201 responses carry an EnvironmentContainerResource body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py
new file mode 100644
index 00000000000..02470f0d669
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_environment_specification_versions_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentSpecificationVersionsOperations:
+    """EnvironmentSpecificationVersionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]:
+        """List versions.
+
+        Page through every EnvironmentSpecificationVersion under the named container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentSpecificationVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers shared by the first and all subsequent page requests
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: expand the path template with serialized arguments
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct query parameters; optional $orderBy/$top/$skip are sent only when provided
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions'} # type: ignore
+
+    async def delete(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete version.
+
+        Delete a single version from the environment specification container.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the versioned path template
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (request JSON responses)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.EnvironmentSpecificationVersionResource":
+        """Get version.
+
+        Fetch one EnvironmentSpecificationVersion by container name and version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the versioned path template
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (request JSON responses)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.EnvironmentSpecificationVersionResource",
+        **kwargs
+    ) -> "models.EnvironmentSpecificationVersionResource":
+        """Creates or updates an EnvironmentSpecificationVersion.
+
+        Issues an HTTP PUT; the service returns 200 for updates and 201 for creations.
+
+        :param name: Name of EnvironmentSpecificationVersion.
+        :type name: str
+        :param version: Version of EnvironmentSpecificationVersion.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Definition of EnvironmentSpecificationVersion.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL; the container name must match the service naming pattern
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (JSON request body and response)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentSpecificationVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py
new file mode 100644
index 00000000000..d5b57f8acbc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_jobs_operations.py
@@ -0,0 +1,469 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class JobsOperations:
+ """JobsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        job_type: Optional[str] = None,
+        tags: Optional[str] = None,
+        tag: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.JobBaseResourceArmPaginatedResult"]:
+        """Lists Jobs in the workspace.
+
+        Returns a pageable collection of JobBaseResource, optionally filtered by type and tags.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param job_type: Type of job to be returned.
+        :type job_type: str
+        :param tags: Tags for job to be returned.
+        :type tags: str
+        :param tag: Jobs returned will have this tag key.
+        :type tag: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.JobBaseResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers shared by the first and all subsequent page requests
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: expand the path template with serialized arguments
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct query parameters; optional filters are sent only when provided
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if job_type is not None:
+                    query_parameters['jobType'] = self._serialize.query("job_type", job_type, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if tag is not None:
+                    query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('JobBaseResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs'} # type: ignore
+
+    async def _delete_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the path template
+        url = self._delete_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (request JSON responses)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def begin_delete(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Deletes a Job (asynchronous).
+
+        Starts the service-side delete and returns an AsyncLROPoller that tracks the long-running operation.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'},  path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def get(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.JobBaseResource":
+        """Gets a Job by name/id.
+
+        Fetch a single JobBaseResource by its name/id.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from the path template
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (request JSON responses)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.JobBaseResource",
+        **kwargs
+    ) -> "models.JobBaseResource":
+        """Creates and executes a Job.
+
+        Issues an HTTP PUT; the service returns 200 for updates and 201 for creations.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Job definition object.
+        :type body: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.JobBaseResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL; the job id must match the service naming pattern
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only)
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers (JSON request body and response)
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'JobBaseResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} # type: ignore
+
+    async def cancel(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Cancels a Job.
+
+        Cancels a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.cancel.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # POST with no request body; a successful cancel returns 200 with no payload to deserialize.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py
new file mode 100644
index 00000000000..d08c4ac373b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_labeling_jobs_operations.py
@@ -0,0 +1,741 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class LabelingJobsOperations:
+ """LabelingJobsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client used to build and send HTTP requests.
+        self._client = client
+        # msrest-style serializer/deserializer pair for request bodies and responses.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Service client configuration (subscription id, polling interval, etc.).
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        count: Optional[int] = None,
+        **kwargs
+    ) -> AsyncIterable["models.LabelingJobResourceArmPaginatedResult"]:
+        """Lists labeling jobs in the workspace.
+
+        Lists labeling jobs in the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param count: Number of labeling jobs to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.LabelingJobResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the full URL and query string.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: the service-provided next_link already encodes all query parameters.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Turn one page response into (next-page link, items) for AsyncItemPaged.
+            deserialized = self._deserialize('LabelingJobResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs'}  # type: ignore
+
+    async def delete(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Delete a labeling job.
+
+        Delete a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 = deleted, 204 = already absent; both are treated as success with no payload.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def get(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        include_job_instructions: Optional[bool] = None,
+        include_label_categories: Optional[bool] = None,
+        **kwargs
+    ) -> "models.LabelingJobResource":
+        """Gets a labeling job by name/id.
+
+        Gets a labeling job by name/id.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param include_job_instructions: Boolean value to indicate whether to include JobInstructions
+         in response.
+        :type include_job_instructions: bool
+        :param include_label_categories: Boolean value to indicate whether to include LabelCategories
+         in response.
+        :type include_label_categories: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LabelingJobResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        # The two opt-in flags are only sent when explicitly provided by the caller.
+        if include_job_instructions is not None:
+            query_parameters['includeJobInstructions'] = self._serialize.query("include_job_instructions", include_job_instructions, 'bool')
+        if include_label_categories is not None:
+            query_parameters['includeLabelCategories'] = self._serialize.query("include_label_categories", include_label_categories, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def _create_or_update_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.LabelingJobResource",
+        **kwargs
+    ) -> "models.LabelingJobResource":
+        """Send the initial PUT of the create_or_update long-running operation.
+
+        Returns the immediate 200/201 response payload; polling to completion is
+        handled by :meth:`begin_create_or_update`.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # LabelingJob ids are validated client-side against the service's naming pattern.
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'LabelingJobResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if response.status_code == 201:
+            # 201 additionally surfaces the async-operation headers used by the LRO poller.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.LabelingJobResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.LabelingJobResource"]:
+        """Creates or updates a labeling job (asynchronous).
+
+        Creates or updates a labeling job (asynchronous).
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: LabelingJob definition object.
+        :type body: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either LabelingJobResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.LabelingJobResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial PUT when not resuming from a saved continuation token.
+        if cont_token is None:
+            raw_result = await self._create_or_update_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polled response, surfacing the async-operation headers.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state is read from the Azure-AsyncOperation endpoint for this LRO.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    async def _export_labels_initial(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ExportSummary",
+        **kwargs
+    ) -> Optional["models.ExportSummary"]:
+        """Send the initial POST of the export_labels long-running operation.
+
+        A 200 carries an immediate ExportSummary payload; a 202 carries no body
+        and is tracked via the Location/Retry-After headers by :meth:`begin_export_labels`.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.ExportSummary"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._export_labels_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ExportSummary')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # deserialized stays None on 202 (accepted, no body yet).
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _export_labels_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
+
+    async def begin_export_labels(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ExportSummary",
+        **kwargs
+    ) -> AsyncLROPoller["models.ExportSummary"]:
+        """Export labels from a labeling job (asynchronous).
+
+        Export labels from a labeling job (asynchronous).
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The export summary.
+        :type body: ~azure_machine_learning_workspaces.models.ExportSummary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ExportSummary]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExportSummary"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial POST when not resuming from a saved continuation token.
+        if cont_token is None:
+            raw_result = await self._export_labels_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polled response into an ExportSummary.
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state is read from the Location header for this LRO (matches the 202 path in the initial call).
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_export_labels.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'}  # type: ignore
+
+    async def pause(
+        self,
+        id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Pause a labeling job.
+
+        Pause a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.pause.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # POST with no request body; a successful pause returns 200 with no payload.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    pause.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause'}  # type: ignore
+
+ async def _resume_initial(
+ self,
+ id: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Send the initial POST of the resume long-running operation.
+
+ Returns once the service has accepted (202) or completed (200) the
+ request; polling to completion is driven by :meth:`begin_resume`.
+
+ :param id: The name and identifier for the LabelingJob.
+ :type id: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._resume_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'id': self._serialize.url("id", id, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 200: resume finished synchronously; 202: accepted for async processing.
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ # Location/Retry-After are what the LRO poller uses to track progress.
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _resume_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'} # type: ignore
+
+ async def begin_resume(
+ self,
+ id: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Resume a labeling job (asynchronous).
+
+ Resume a labeling job (asynchronous).
+
+ :param id: The name and identifier for the LabelingJob.
+ :type id: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ # With no continuation token, issue the initial request now; otherwise the
+ # poller is rebuilt from the saved state below and no request is sent here.
+ if cont_token is None:
+ raw_result = await self._resume_initial(
+ id=id,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # The operation has no body; only the optional custom cls is honored.
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'id': self._serialize.url("id", id, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ # Final state is reported via the Location header (matches the 202
+ # handling in _resume_initial).
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_resume.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py
new file mode 100644
index 00000000000..be32f2ed836
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_containers_operations.py
@@ -0,0 +1,333 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+ class ModelContainersOperations:
+ """ModelContainersOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ # Keep references to the shared pipeline client and (de)serializers.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skip: Optional[str] = None,
+ count: Optional[int] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ModelContainerResourceArmPaginatedResult"]:
+ """List model containers.
+
+ List model containers.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :param count: Maximum number of results to return.
+ :type count: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ModelContainerResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # First page: build the URL from metadata and attach query parameters.
+ # Later pages: the service-supplied next_link is used as-is.
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+ if count is not None:
+ query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserialize one page and hand back (next_link, items) to the pager.
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ModelContainerResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ # Fetch a single page; map_error may raise a typed exception first,
+ # otherwise a generic HttpResponseError carrying the error model is raised.
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'} # type: ignore
+
+ async def delete(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Delete container.
+
+ Delete container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 200 (deleted) and 204 (did not exist) are both treated as success.
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+ async def get(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ModelContainerResource":
+ """Get container.
+
+ Get container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelContainerResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ body: "models.ModelContainerResource",
+ **kwargs
+ ) -> "models.ModelContainerResource":
+ """Create or update container.
+
+ Create or update container.
+
+ :param name: Container name.
+ :type name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param body: Container entity to create or update.
+ :type body: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ModelContainerResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ # NOTE(review): the name pattern is enforced only here, not in get/delete —
+ # presumably because creation is where naming rules must hold; confirm
+ # against the service Swagger before changing.
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(body, 'ModelContainerResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ # 200 (updated) and 201 (created) both carry the resulting resource.
+ if response.status_code == 200:
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py
new file mode 100644
index 00000000000..03e96f77490
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_model_versions_operations.py
@@ -0,0 +1,381 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelVersionsOperations:
+ """ModelVersionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ # Keep references to the shared pipeline client and (de)serializers.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ skip: Optional[str] = None,
+ order_by: Optional[str] = None,
+ top: Optional[int] = None,
+ version: Optional[str] = None,
+ description: Optional[str] = None,
+ offset: Optional[int] = None,
+ tags: Optional[str] = None,
+ properties: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ModelVersionResourceArmPaginatedResult"]:
+ """List model versions.
+
+ List model versions.
+
+ :param name: Model name.
+ :type name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :param order_by: Ordering of list.
+ :type order_by: str
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param version: Model version.
+ :type version: str
+ :param description: Model description.
+ :type description: str
+ :param offset: Number of initial results to skip.
+ :type offset: int
+ :param tags: Comma-separated list of tag names (and optionally values). Example:
+ tag1,tag2=value2.
+ :type tags: str
+ :param properties: Comma-separated list of property names (and optionally values). Example:
+ prop1,prop2=value2.
+ :type properties: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ModelVersionResourceArmPaginatedResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ModelVersionResourceArmPaginatedResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # First page: build the URL and attach every non-None filter as a
+ # query parameter. Later pages: next_link already embeds them.
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+ if order_by is not None:
+ query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+ if top is not None:
+ query_parameters['$top'] = self._serialize.query("top", top, 'int')
+ if version is not None:
+ query_parameters['version'] = self._serialize.query("version", version, 'str')
+ if description is not None:
+ query_parameters['description'] = self._serialize.query("description", description, 'str')
+ if offset is not None:
+ query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
+ if tags is not None:
+ query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+ if properties is not None:
+ query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserialize one page and hand back (next_link, items) to the pager.
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ModelVersionResourceArmPaginatedResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ # Fetch a single page, raising a typed or generic error on non-200.
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions'} # type: ignore
+
+ async def delete(
+ self,
+ name: str,
+ version: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ """Delete version.
+
+ Delete version.
+
+ :param name: Container name.
+ :type name: str
+ :param version: Version identifier.
+ :type version: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'name': self._serialize.url("name", name, 'str'),
+ 'version': self._serialize.url("version", version, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 200 (deleted) and 204 (did not exist) are both treated as success.
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} # type: ignore
+
+    async def get(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.ModelVersionResource":
+        """Get version.
+
+        Get a single model version by container name and version identifier.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        # Map auth/not-found/conflict status codes to typed exceptions; callers may
+        # extend or override via an 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # map_error raises a typed exception for known codes; otherwise fall
+            # through and raise HttpResponseError with the deserialized ARM error.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            # Hand the raw response, model and (empty) response headers to the
+            # caller-supplied callback instead of returning the model directly.
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        name: str,
+        version: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.ModelVersionResource",
+        **kwargs
+    ) -> "models.ModelVersionResource":
+        """Create or update version.
+
+        Create or update a model version (PUT upsert; 200 = updated, 201 = created).
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        # Known status codes mapped to typed exceptions; extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Create enforces the container-name pattern client-side (read paths do not).
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ModelVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both success codes carry the same resource payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py
new file mode 100644
index 00000000000..20ceee1d697
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_deployments_operations.py
@@ -0,0 +1,718 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineDeploymentsOperations:
+ """OnlineDeploymentsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        """Store the pipeline client, configuration and (de)serializers injected by the service client."""
+        self._client = client          # pipeline client used to build and run requests
+        self._serialize = serializer   # msrest Serializer for URL/query/header/body encoding
+        self._deserialize = deserializer  # msrest Deserializer for response models
+        self._config = config          # client configuration (subscription_id, polling_interval, ...)
+
+    def list(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]:
+        """List Inference Endpoint Deployments.
+
+        List Inference Endpoint Deployments.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Top of list.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build either the first-page request (full URL + query params) or a
+            # follow-up request from the service-provided nextLink.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # nextLink already carries every query parameter; do not re-append them.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Turn one page response into (continuation token, async list of items).
+            deserialized = self._deserialize('OnlineDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments'}  # type: ignore
+
+    async def _delete_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Issue the initial DELETE for the deployment LRO; ``begin_delete`` polls the result.
+
+        200/204 mean the delete completed synchronously; 202 means it was accepted
+        and polling headers (Location, Retry-After, x-ms-async-operation-timeout)
+        describe how to track it.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            # Async-accepted: surface the polling headers to the caller (the poller).
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def begin_delete(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Delete Inference Endpoint Deployment (asynchronous).
+
+        Delete Inference Endpoint Deployment (asynchronous).
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x makes _delete_initial hand back the raw
+            # PipelineResponse so the polling method can drive the LRO from it.
+            raw_result = await self._delete_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; drop them before polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete has no body; only invoke the caller's callback if provided.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # 'final-state-via: location': the terminal state is read from the Location URL.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def get(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.OnlineDeploymentTrackedResource":
+        """Get Inference Endpoint Deployment.
+
+        Get Inference Endpoint Deployment.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: OnlineDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def _update_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineDeploymentPartialTrackedResource",
+        **kwargs
+    ) -> Optional["models.OnlineDeploymentTrackedResource"]:
+        """Issue the initial PATCH for the deployment-update LRO; ``begin_update`` polls it.
+
+        Returns the updated resource on a synchronous 200, or ``None`` on a 202
+        async-accepted response (polling headers are captured for the poller).
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.OnlineDeploymentTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Write paths validate the name pattern client-side before calling ARM.
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineDeploymentPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 202:
+            # Async-accepted: no body; expose polling headers instead.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def begin_update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineDeploymentPartialTrackedResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.OnlineDeploymentTrackedResource"]:
+        """Update Online Deployment (asynchronous).
+
+        Update Online Deployment (asynchronous).
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineDeploymentPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # cls=lambda x,y,z: x returns the raw PipelineResponse so the polling
+            # method can drive the LRO from the initial PATCH response.
+            raw_result = await self._update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Consumed by the initial call; must not leak into the polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response into the tracked resource model.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # 'final-state-via: location': final resource state is fetched from the Location URL.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def _create_or_update_initial(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.OnlineDeploymentTrackedResource",
+        **kwargs
+    ) -> "models.OnlineDeploymentTrackedResource":
+        """Issue the initial PUT for the deployment create/update LRO; ``begin_create_or_update`` polls it.
+
+        200 = updated synchronously; 201 = creation accepted, with
+        Azure-AsyncOperation / x-ms-async-operation-timeout headers for polling.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Write paths validate the name pattern client-side before calling ARM.
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'OnlineDeploymentTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 201:
+            # Creation accepted: capture the async-operation polling headers
+            # alongside the provisional resource body.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.OnlineDeploymentTrackedResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.OnlineDeploymentTrackedResource"]:
+        """Create or update Inference Endpoint Deployment (asynchronous).
+
+        Create or update Inference Endpoint Deployment (asynchronous).
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Inference Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial PUT when not resuming from a saved poller state;
+        # cls=lambda hands the raw PipelineResponse through for the poller to consume.
+        if cont_token is None:
+            raw_result = await self._create_or_update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal polling response: LRO headers plus the resource body.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final LRO state is read from the Azure-AsyncOperation header for this operation.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    async def get_logs(
+        self,
+        endpoint_name: str,
+        deployment_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.DeploymentLogsRequest",
+        **kwargs
+    ) -> "models.DeploymentLogs":
+        """Retrieve logs from an Inference Endpoint Deployment.
+
+        POSTs the request to the deployment's ``getLogs`` action and returns the
+        deserialized log payload.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The name and identifier for the endpoint.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The request containing parameters for retrieving logs.
+        :type body: ~azure_machine_learning_workspaces.models.DeploymentLogsRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeploymentLogs, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DeploymentLogs
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DeploymentLogs"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_logs.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the request model and POST; only 200 is a success status here.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DeploymentLogsRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeploymentLogs', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py
new file mode 100644
index 00000000000..f5c56094b50
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_online_endpoints_operations.py
@@ -0,0 +1,898 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineEndpointsOperations:
+ """OnlineEndpointsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Pipeline client that builds and sends the HTTP requests for this operation group.
+        self._client = client
+        # msrest serializer/deserializer pair for request bodies and responses.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Client configuration (provides subscription_id and polling_interval, among others).
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: Optional[str] = None,
+        count: Optional[int] = None,
+        compute_type: Optional[Union[str, "models.EndpointComputeType"]] = None,
+        skip: Optional[str] = None,
+        tags: Optional[str] = None,
+        properties: Optional[str] = None,
+        order_by: Optional[Union[str, "models.OrderString"]] = None,
+        **kwargs
+    ) -> AsyncIterable["models.OnlineEndpointTrackedResourceArmPaginatedResult"]:
+        """List Online Endpoints.
+
+        List Online Endpoints.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param name: Name of the endpoint.
+        :type name: str
+        :param count: Number of endpoints to be retrieved in a page of results.
+        :type count: int
+        :param compute_type: EndpointComputeType to be filtered by.
+        :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param tags: A set of tags with which to filter the returned models. It is a comma separated
+         string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .
+        :type tags: str
+        :param properties: A set of properties with which to filter the returned models. It is a comma
+         separated string of properties key and/or properties key=value Example:
+         propKey1,propKey2,propKey3=value3 .
+        :type properties: str
+        :param order_by: The option to order the response.
+        :type order_by: str or ~azure_machine_learning_workspaces.models.OrderString
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request: full URL plus query string for the first page,
+            # or the service-provided nextLink (already fully qualified) afterwards.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                # Optional filters are sent only when the caller supplied them.
+                if name is not None:
+                    query_parameters['name'] = self._serialize.query("name", name, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+                if compute_type is not None:
+                    query_parameters['computeType'] = self._serialize.query("compute_type", compute_type, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if properties is not None:
+                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+                if order_by is not None:
+                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Split one page into (continuation token, async iterable of items).
+            deserialized = self._deserialize('OnlineEndpointTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): unlike the other operations in this class, the error body is
+                # deserialized before map_error here, so mapped errors (401/404/409) raise
+                # without the model attached — generator quirk, verify against regenerated code.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints'}  # type: ignore
+
+    async def _delete_initial(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Issue the initial DELETE request of the delete LRO.
+
+        Returns ``None``; on a 202 response the LRO headers (timeout, Location,
+        Retry-After) are surfaced only through the optional ``cls`` callback.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200/204: deletion already complete; 202: accepted, poll via Location header.
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+    async def begin_delete(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Delete Online Endpoint (asynchronous).
+
+        Delete Online Endpoint (asynchronous).
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial DELETE when not resuming from a saved poller state;
+        # cls=lambda hands the raw PipelineResponse through for the poller to consume.
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete has no response body; only invoke the caller's hook if provided.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final LRO state is read from the Location header for this operation.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+    async def get(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.OnlineEndpointTrackedResource":
+        """Get Online Endpoint.
+
+        Get Online Endpoint.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: OnlineEndpointTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Plain synchronous GET: only 200 is a success status.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+    async def _update_initial(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineEndpointPartialTrackedResource",
+        **kwargs
+    ) -> Optional["models.OnlineEndpointTrackedResource"]:
+        """Issue the initial PATCH request of the update LRO.
+
+        Returns the deserialized resource on 200; on 202 returns ``None`` and the
+        LRO headers (timeout, Location, Retry-After) are surfaced only through the
+        optional ``cls`` callback.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.OnlineEndpointTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineEndpointPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 carries the updated resource in the body; 202 only carries LRO headers.
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+        if response.status_code == 202:
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+    async def begin_update(
+        self,
+        endpoint_name: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.PartialOnlineEndpointPartialTrackedResource",
+        **kwargs
+    ) -> AsyncLROPoller["models.OnlineEndpointTrackedResource"]:
+        """Update Online Endpoint (asynchronous).
+
+        Update Online Endpoint (asynchronous).
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineEndpointPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial PATCH when not resuming from a saved poller state;
+        # cls=lambda hands the raw PipelineResponse through for the poller to consume.
+        if cont_token is None:
+            raw_result = await self._update_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal polling response into the updated resource model.
+            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final LRO state is read from the Location header for this operation.
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    async def _create_or_update_initial(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.OnlineEndpointTrackedResource",
        **kwargs
    ) -> "models.OnlineEndpointTrackedResource":
        """Issue the initial PUT for the create-or-update long-running operation.

        Not part of the public surface: ``begin_create_or_update`` calls this
        and wraps the raw response in an ``AsyncLROPoller``.

        :param endpoint_name: Online Endpoint name.
        :param resource_group_name: The name of the resource group.
        :param workspace_name: Name of Azure Machine Learning workspace.
        :param body: Online Endpoint entity to apply during operation.
        :return: The deserialized OnlineEndpointTrackedResource from the
         200/201 response (or the result of the ``cls`` hook).
        :raises ~azure.core.exceptions.HttpResponseError: on non-200/201 status.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        # Map well-known auth/CRUD failures to typed azure-core exceptions;
        # callers may extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            # Endpoint names are validated client-side against the service's
            # naming rule (alphanumeric first char, then up to 254 more).
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'OnlineEndpointTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = applied synchronously; 201 = created/accepted with LRO pending.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 200:
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

        if response.status_code == 201:
            # Surface the LRO control headers so the poller can track progress.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    async def begin_create_or_update(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.OnlineEndpointTrackedResource",
        **kwargs
    ) -> AsyncLROPoller["models.OnlineEndpointTrackedResource"]:
        """Create or update Online Endpoint (asynchronous).

        Create or update Online Endpoint (asynchronous).

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Online Endpoint entity to apply during operation.
        :type body: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial PUT now.  'cls' is
            # overridden so the raw PipelineResponse comes back for the poller.
            raw_result = await self._create_or_update_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs applied only to the initial request; drop them so they
        # are not replayed on every polling request.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response (plus LRO control headers) into
            # the tracked-resource model, honouring a custom 'cls' if supplied.
            response_headers = {}
            response = pipeline_response.http_response
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, response_headers)
            return deserialized

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # This LRO reports its terminal state via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its serialized state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+ async def list_keys(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.EndpointAuthKeys":
+ """List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+ List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+ :param endpoint_name: Online Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: EndpointAuthKeys, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthKeys"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys'} # type: ignore
+
    async def _regenerate_keys_initial(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.RegenerateEndpointKeysRequest",
        **kwargs
    ) -> None:
        """Issue the initial POST for the regenerate-keys long-running operation.

        Not part of the public surface: ``begin_regenerate_keys`` calls this and
        wraps the raw response in an ``AsyncLROPoller``.  The operation returns
        no response body.

        :raises ~azure.core.exceptions.HttpResponseError: on non-200/202 status.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known failures to typed azure-core exceptions; callers may
        # extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._regenerate_keys_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'RegenerateEndpointKeysRequest')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = completed synchronously; 202 = accepted, poll via Location header.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # Surface the LRO control headers so the poller can track progress.
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, None, response_headers)
        # Implicitly returns None: the operation carries no payload.

    _regenerate_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'}  # type: ignore
+
    async def begin_regenerate_keys(
        self,
        endpoint_name: str,
        resource_group_name: str,
        workspace_name: str,
        body: "models.RegenerateEndpointKeysRequest",
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous).

        Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous).

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: RegenerateKeys request .
        :type body: ~azure_machine_learning_workspaces.models.RegenerateEndpointKeysRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial POST now.  'cls' is
            # overridden so the raw PipelineResponse comes back for the poller.
            raw_result = await self._regenerate_keys_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs applied only to the initial request; drop them so they
        # are not replayed on every polling request.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Operation has no response body: returns None unless a custom
            # 'cls' hook is supplied.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # This LRO reports its terminal state via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its serialized state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'}  # type: ignore
+
+ async def get_token(
+ self,
+ endpoint_name: str,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.EndpointAuthToken":
+ """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+ Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+ :param endpoint_name: Online Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: EndpointAuthToken, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthToken
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthToken"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('EndpointAuthToken', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
new file mode 100644
index 00000000000..ec4a4987c9a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class Operations:
    """Operations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Alias to the generated model module for this operation group.
    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs
    ) -> AsyncIterable["models.OperationListResult"]:
        """Lists all of the available Azure Machine Learning Workspaces REST API operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OperationListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OperationListResult"]
        # Map well-known failures to typed azure-core exceptions; callers may
        # extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request: the first page uses the operation's URL
            # template; subsequent pages follow the service-supplied link as-is.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Convert one page into (continuation token, items).  The token is
            # always None here: this listing is served as a single page.
            deserialized = self._deserialize('OperationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to typed errors.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..05eb4747bcf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,314 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations:
+ """PrivateEndpointConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
    def list(
        self,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> AsyncIterable["models.PrivateEndpointConnectionListResult"]:
        """List all the private endpoint connections associated with the workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PrivateEndpointConnectionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnectionListResult"]
        # Map well-known failures to typed azure-core exceptions; callers may
        # extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request: the first page uses the operation's URL
            # template; subsequent pages follow the service-supplied link as-is.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Convert one page into (continuation token, items).  The token is
            # always None here: this listing is served as a single page.
            deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to typed errors.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections'}  # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        properties: "models.PrivateEndpointConnection",
+        **kwargs
+    ) -> "models.PrivateEndpointConnection":
+        """Update the state of specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :param properties: The private endpoint connection properties.
+        :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL: fill the path template stored on this method's metadata.
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the request body and issue the PUT through the async pipeline.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # Raises the mapped typed exception if the status is in error_map;
+            # otherwise fall through and surface the modeled ARM error payload.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            # Caller-supplied callback receives the raw pipeline response too.
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+
+    async def delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        **kwargs
+    ) -> None:
+        """Deletes the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL: fill the path template stored on this method's metadata.
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are treated as success for this DELETE.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # No body to deserialize; only invoke the optional caller callback.
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..cd33afa2558
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations:
+    """PrivateLinkResourcesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the model classes through the
+    # operation group instance.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # serializer for URL/query/header values
+        self._deserialize = deserializer  # deserializer for response payloads
+        self._config = config  # client configuration (supplies subscription_id)
+
+    async def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.PrivateLinkResourceListResult":
+        """Gets the private link resources that need to be created for a workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateLinkResourceListResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateLinkResourceListResult"]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL: fill the path template stored on this method's metadata.
+        url = self.list.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # NOTE(review): unlike the private-endpoint-connection operations, no
+            # modeled ErrorResponse body is deserialized here — presumably the
+            # service spec defines no default error schema for this operation;
+            # confirm against the swagger before relying on error details.
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+        if cls:
+            # Caller-supplied callback receives the raw pipeline response too.
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
new file mode 100644
index 00000000000..9d1fe521570
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
@@ -0,0 +1,176 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations:
+    """QuotasOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the model classes through the
+    # operation group instance.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # serializer for URL/query/header/body values
+        self._deserialize = deserializer  # deserializer for response payloads
+        self._config = config  # client configuration (supplies subscription_id)
+
+    async def update(
+        self,
+        location: str,
+        parameters: "models.QuotaUpdateParameters",
+        **kwargs
+    ) -> "models.UpdateWorkspaceQuotasResult":
+        """Update quota for each VM family in workspace.
+
+        :param location: The location for update quota is queried.
+        :type location: str
+        :param parameters: Quota update parameters.
+        :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL: fill the path template stored on this method's metadata.
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the request body and issue the POST through the async pipeline.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+        if cls:
+            # Caller-supplied callback receives the raw pipeline response too.
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'}  # type: ignore
+
+    def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> AsyncIterable["models.ListWorkspaceQuotas"]:
+        """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceQuotas"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for the first page (templated URL + api-version)
+            # or for a continuation page (the service-provided next_link as-is).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, items).
+            deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page; errors raise here, at iteration time (list() itself
+            # performs no I/O — AsyncItemPaged is lazy).
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # The modeled error is deserialized first; map_error may raise a
+                # typed exception before the generic HttpResponseError below.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
new file mode 100644
index 00000000000..ee8c0189b7e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations:
+    """UsagesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the model classes through the
+    # operation group instance.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # serializer for URL/query/header values
+        self._deserialize = deserializer  # deserializer for response payloads
+        self._config = config  # client configuration (supplies subscription_id)
+
+    def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> AsyncIterable["models.ListUsagesResult"]:
+        """Gets the current usage information as well as limits for AML resources for given subscription
+        and location.
+
+        :param location: The location for which resource usage is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListUsagesResult"]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for the first page (templated URL + api-version)
+            # or for a continuation page (the service-provided next_link as-is).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, items).
+            deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page; errors raise here, at iteration time (list() itself
+            # performs no I/O — AsyncItemPaged is lazy). No modeled error body is
+            # deserialized for this operation.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..af4c948c30a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations:
+    """VirtualMachineSizesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Class-level alias so callers can reach the model classes through the
+    # operation group instance.
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # serializer for URL/query/header values
+        self._deserialize = deserializer  # deserializer for response payloads
+        self._config = config  # client configuration (supplies subscription_id)
+
+    async def list(
+        self,
+        location: str,
+        **kwargs
+    ) -> "models.VirtualMachineSizeListResult":
+        """Returns supported VM Sizes in a location.
+
+        :param location: The location upon which virtual-machine-sizes is queried.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: VirtualMachineSizeListResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.VirtualMachineSizeListResult"]
+        # Map well-known ARM failure statuses to typed azure-core exceptions;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL: fill the path template stored on this method's metadata.
+        url = self.list.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # No modeled error body is deserialized for this operation.
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+        if cls:
+            # Caller-supplied callback receives the raw pipeline response too.
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..5a5a25f50c3
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations:
+    """WorkspaceConnectionsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        target: Optional[str] = None,
+        category: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]:
+        """List all connections under an AML workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param target: Target of the workspace connection.
+        :type target: str
+        :param category: Category of the workspace connection.
+        :type category: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"  # API version pinned by the code generator
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if target is not None:
+                    query_parameters['target'] = self._serialize.query("target", target, 'str')
+                if category is not None:
+                    query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]  # next_link already carries the query string
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return None, AsyncList(list_of_elem)  # no continuation token: single-page result
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'}  # type: ignore
+
+    async def create(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        parameters: "models.WorkspaceConnection",
+        **kwargs
+    ) -> "models.WorkspaceConnection":
+        """Add a new workspace connection.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :param parameters: The object for creating or updating a new workspace connection.
+        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'WorkspaceConnection')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        **kwargs
+    ) -> "models.WorkspaceConnection":
+        """Get the detail of a workspace connection.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
+
+    async def delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        connection_name: str,
+        **kwargs
+    ) -> None:
+        """Delete a workspace connection.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param connection_name: Friendly name of the workspace connection.
+        :type connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # 200 and 204 both indicate successful deletion
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..19d860f9b51
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations:
+    """WorkspaceFeaturesOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncIterable["models.ListAmlUserFeatureResult"]:
+        """Lists all enabled features for a workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListAmlUserFeatureResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"  # API version pinned by the code generator
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]  # next_link already carries the query string
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)  # continuation link for the next page, if any
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py
new file mode 100644
index 00000000000..088c86c46b2
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py
@@ -0,0 +1,109 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceSkusOperations:
+    """WorkspaceSkusOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        **kwargs
+    ) -> AsyncIterable["models.SkuListResult"]:
+        """Lists all SKUs with associated features.
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either SkuListResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.SkuListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"  # API version pinned by the code generator
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]  # next_link already carries the query string
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('SkuListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)  # continuation link for the next page, if any
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..41c3420ee37
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
@@ -0,0 +1,1021 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations:
+ """WorkspacesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        """Store the pipeline client, configuration and (de)serializers shared by all operations."""
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.Workspace":
+        """Gets the properties of the specified machine learning workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Workspace, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.Workspace
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # 'cls' lets a caller post-process the raw (response, model, headers) triple
+        # instead of receiving the plainly deserialized model.
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
+        # Map well-known status codes to azure-core exception types; callers may extend via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # map_error raises for mapped codes; anything unmapped falls through to HttpResponseError
+            # carrying the deserialized ARM error payload.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Workspace', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    async def _create_or_update_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: "models.Workspace",
+        **kwargs
+    ) -> Optional["models.Workspace"]:
+        """Send the initial PUT of the create-or-update long-running operation.
+
+        Returns the deserialized Workspace for 200/201 responses, or None for 202
+        (operation accepted, result not yet available — the poller follows up).
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.Workspace"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'Workspace')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 202 Accepted carries no body, so 'deserialized' stays None in that case.
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('Workspace', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Workspace', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    async def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: "models.Workspace",
+        **kwargs
+    ) -> AsyncLROPoller["models.Workspace"]:
+        """Creates or updates a workspace with the specified parameters.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param parameters: The parameters for creating or updating a machine learning workspace.
+        :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only fire the initial PUT when not resuming from a continuation token;
+        # 'cls=lambda x,y,z: x' keeps the raw pipeline response for the poller.
+        if cont_token is None:
+            raw_result = await self._create_or_update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                parameters=parameters,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polling response into the Workspace model.
+            deserialized = self._deserialize('Workspace', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    async def _delete_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Send the initial DELETE of the delete long-running operation.
+
+        Accepts 200/202/204; the poller tracks the operation to completion.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    async def begin_delete(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Deletes a machine learning workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only fire the initial DELETE when not resuming from a continuation token.
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete has no body to deserialize; only invoke the caller's hook if given.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    async def update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: "models.WorkspaceUpdateParameters",
+        **kwargs
+    ) -> "models.Workspace":
+        """Updates a machine learning workspace with the specified parameters.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param parameters: The parameters for updating a machine learning workspace.
+        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Workspace, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.Workspace
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Partial update: serialize the patch model and send it via PATCH.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Workspace', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
+    def list_by_resource_group(
+        self,
+        resource_group_name: str,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.WorkspaceListResult"]:
+        """Lists all the available machine learning workspaces under the specified resource group.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page: templated URL plus query params. Later pages: the service-provided
+            # next_link is used verbatim (it already embeds all query parameters).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_resource_group.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Turn one page into (next-page link or None, async iterable of items).
+            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page, raising on any non-200 response.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
+
+    async def list_keys(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.ListWorkspaceKeysResult":
+        """Lists all the keys associated with this workspace. This includes keys for the storage account,
+        app insights and password for container registry.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ListWorkspaceKeysResult, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceKeysResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Secrets are retrieved with POST (ARM 'listKeys' action), not GET, so they never
+        # appear in read-only audit paths.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'}  # type: ignore
+
+    async def _resync_keys_initial(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> None:
+        """Send the initial POST of the resync-keys long-running operation.
+
+        Accepts 200/202; the poller tracks the operation to completion.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._resync_keys_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'}  # type: ignore
+
+    async def begin_resync_keys(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Resync all the keys associated with this workspace. This includes keys for the storage account,
+        app insights and password for container registry.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only fire the initial POST when not resuming from a continuation token.
+        if cont_token is None:
+            raw_result = await self._resync_keys_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Resync returns no body; only invoke the caller's hook if given.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'}  # type: ignore
+
+    def list_by_subscription(
+        self,
+        skip: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["models.WorkspaceListResult"]:
+        """Lists all the available machine learning workspaces under the specified subscription.
+
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page: templated URL plus query params. Later pages: the service-provided
+            # next_link is used verbatim (it already embeds all query parameters).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_subscription.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Turn one page into (next-page link or None, async iterable of items).
+            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page, raising on any non-200 response.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
+
+ async def list_notebook_access_token(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.NotebookAccessTokenResult":
+ """Return notebook access token and refresh token.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: NotebookAccessTokenResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookAccessTokenResult"]
+ # Map common ARM status codes to azure-core exception types; callers may
+ # extend or override the mapping via the 'error_map' keyword argument.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_access_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # The listNotebookAccessToken ARM action is invoked via POST with no request body.
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)
+
+ # Give callers a hook to post-process the raw response via the 'cls' callback.
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} # type: ignore
+
+ async def _prepare_notebook_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> Optional["models.NotebookResourceInfo"]:
+ """Send the initial POST of the prepareNotebook long-running operation.
+
+ Returns the deserialized NotebookResourceInfo when the service answers
+ 200 (operation already complete), or None when it answers 202 (operation
+ accepted; the poller in begin_prepare_notebook tracks completion).
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ # Map common ARM status codes to azure-core exception types; callers may
+ # extend or override the mapping via the 'error_map' keyword argument.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_notebook_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 200 = completed synchronously; 202 = accepted, result arrives via polling.
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_notebook_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def begin_prepare_notebook(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller["models.NotebookResourceInfo"]:
+ """prepare_notebook.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ # Only issue the initial request when not resuming from a saved poller state.
+ if cont_token is None:
+ raw_result = await self._prepare_notebook_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ # Pass-through cls: hand the raw pipeline response to the poller
+ # instead of the deserialized model.
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ # Remove per-request kwargs so they are not forwarded to the polling method.
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # Deserialize the final polling response into the result model.
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ # 'final-state-via: location': the final result is fetched from the URL in
+ # the Location header once polling reports completion.
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare_notebook.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def list_storage_account_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListStorageAccountKeysResult":
+ """list_storage_account_keys.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListStorageAccountKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListStorageAccountKeysResult"]
+ # Map common ARM status codes to azure-core exception types; callers may
+ # extend or override the mapping via the 'error_map' keyword argument.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_storage_account_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # The listStorageAccountKeys ARM action is invoked via POST with no request body.
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)
+
+ # Give callers a hook to post-process the raw response via the 'cls' callback.
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_storage_account_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} # type: ignore
+
+ async def list_notebook_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListNotebookKeysResult":
+ """list_notebook_keys.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ # Map common ARM status codes to azure-core exception types; callers may
+ # extend or override the mapping via the 'error_map' keyword argument.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # The listNotebookKeys ARM action is invoked via POST with no request body.
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ # Give callers a hook to post-process the raw response via the 'cls' callback.
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
new file mode 100644
index 00000000000..a74b8256fb6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
@@ -0,0 +1,931 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import AccountKeyDatastoreCredentials
+ from ._models_py3 import AccountKeyDatastoreSecrets
+ from ._models_py3 import Aks
+ from ._models_py3 import AksComputeSecrets
+ from ._models_py3 import AksNetworkingConfiguration
+ from ._models_py3 import AksProperties
+ from ._models_py3 import AmlCompute
+ from ._models_py3 import AmlComputeNodeInformation
+ from ._models_py3 import AmlComputeNodesInformation
+ from ._models_py3 import AmlComputeProperties
+ from ._models_py3 import AmlToken
+ from ._models_py3 import AmlUserFeature
+ from ._models_py3 import AssetReferenceBase
+ from ._models_py3 import AssignedUser
+ from ._models_py3 import AutoPauseProperties
+ from ._models_py3 import AutoScaleProperties
+ from ._models_py3 import AutoScaleSettings
+ from ._models_py3 import AzureBlobContents
+ from ._models_py3 import AzureDataLakeGen1Contents
+ from ._models_py3 import AzureDataLakeGen2Contents
+ from ._models_py3 import AzureFileContents
+ from ._models_py3 import AzurePostgreSqlContents
+ from ._models_py3 import AzureSqlDatabaseContents
+ from ._models_py3 import BanditPolicy
+ from ._models_py3 import BatchDeployment
+ from ._models_py3 import BatchDeploymentTrackedResource
+ from ._models_py3 import BatchDeploymentTrackedResourceArmPaginatedResult
+ from ._models_py3 import BatchEndpoint
+ from ._models_py3 import BatchEndpointTrackedResource
+ from ._models_py3 import BatchEndpointTrackedResourceArmPaginatedResult
+ from ._models_py3 import BatchOutputConfiguration
+ from ._models_py3 import BatchRetrySettings
+ from ._models_py3 import CertificateDatastoreCredentials
+ from ._models_py3 import CertificateDatastoreSecrets
+ from ._models_py3 import ClusterUpdateParameters
+ from ._models_py3 import CocoExportSummary
+ from ._models_py3 import CodeConfiguration
+ from ._models_py3 import CodeContainer
+ from ._models_py3 import CodeContainerResource
+ from ._models_py3 import CodeContainerResourceArmPaginatedResult
+ from ._models_py3 import CodeVersion
+ from ._models_py3 import CodeVersionResource
+ from ._models_py3 import CodeVersionResourceArmPaginatedResult
+ from ._models_py3 import CommandJob
+ from ._models_py3 import Components1D3SwueSchemasComputeresourceAllof1
+ from ._models_py3 import Compute
+ from ._models_py3 import ComputeConfiguration
+ from ._models_py3 import ComputeInstance
+ from ._models_py3 import ComputeInstanceApplication
+ from ._models_py3 import ComputeInstanceConnectivityEndpoints
+ from ._models_py3 import ComputeInstanceCreatedBy
+ from ._models_py3 import ComputeInstanceLastOperation
+ from ._models_py3 import ComputeInstanceProperties
+ from ._models_py3 import ComputeInstanceSshSettings
+ from ._models_py3 import ComputeNodesInformation
+ from ._models_py3 import ComputeResource
+ from ._models_py3 import ComputeSchedules
+ from ._models_py3 import ComputeSecrets
+ from ._models_py3 import ComputeStartStopSchedule
+ from ._models_py3 import ContainerResourceRequirements
+ from ._models_py3 import CosmosDbSettings
+ from ._models_py3 import Cron
+ from ._models_py3 import CsvExportSummary
+ from ._models_py3 import DataContainer
+ from ._models_py3 import DataContainerResource
+ from ._models_py3 import DataContainerResourceArmPaginatedResult
+ from ._models_py3 import DataFactory
+ from ._models_py3 import DataLakeAnalytics
+ from ._models_py3 import DataLakeAnalyticsProperties
+ from ._models_py3 import DataPathAssetReference
+ from ._models_py3 import DataVersion
+ from ._models_py3 import DataVersionResource
+ from ._models_py3 import DataVersionResourceArmPaginatedResult
+ from ._models_py3 import Databricks
+ from ._models_py3 import DatabricksComputeSecrets
+ from ._models_py3 import DatabricksProperties
+ from ._models_py3 import DatasetExportSummary
+ from ._models_py3 import DatastoreContents
+ from ._models_py3 import DatastoreCredentials
+ from ._models_py3 import DatastoreProperties
+ from ._models_py3 import DatastorePropertiesResource
+ from ._models_py3 import DatastorePropertiesResourceArmPaginatedResult
+ from ._models_py3 import DatastoreSecrets
+ from ._models_py3 import DeploymentLogs
+ from ._models_py3 import DeploymentLogsRequest
+ from ._models_py3 import DistributionConfiguration
+ from ._models_py3 import DockerBuild
+ from ._models_py3 import DockerImage
+ from ._models_py3 import DockerImagePlatform
+ from ._models_py3 import DockerSpecification
+ from ._models_py3 import EarlyTerminationPolicy
+ from ._models_py3 import EncryptionProperty
+ from ._models_py3 import EndpointAuthKeys
+ from ._models_py3 import EndpointAuthToken
+ from ._models_py3 import EnvironmentContainer
+ from ._models_py3 import EnvironmentContainerResource
+ from ._models_py3 import EnvironmentContainerResourceArmPaginatedResult
+ from ._models_py3 import EnvironmentSpecificationVersion
+ from ._models_py3 import EnvironmentSpecificationVersionResource
+ from ._models_py3 import EnvironmentSpecificationVersionResourceArmPaginatedResult
+ from ._models_py3 import ErrorAdditionalInfo
+ from ._models_py3 import ErrorDetail
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import EstimatedVmPrice
+ from ._models_py3 import EstimatedVmPrices
+ from ._models_py3 import ExportSummary
+ from ._models_py3 import FlavorData
+ from ._models_py3 import GlusterFsContents
+ from ._models_py3 import HdInsight
+ from ._models_py3 import HdInsightProperties
+ from ._models_py3 import IdAssetReference
+ from ._models_py3 import Identity
+ from ._models_py3 import IdentityConfiguration
+ from ._models_py3 import IdentityForCmk
+ from ._models_py3 import InferenceContainerProperties
+ from ._models_py3 import InputDataBinding
+ from ._models_py3 import JobBase
+ from ._models_py3 import JobBaseResource
+ from ._models_py3 import JobBaseResourceArmPaginatedResult
+ from ._models_py3 import JobEndpoint
+ from ._models_py3 import JobOutput
+ from ._models_py3 import K8SOnlineDeployment
+ from ._models_py3 import KeyVaultProperties
+ from ._models_py3 import LabelCategory
+ from ._models_py3 import LabelClass
+ from ._models_py3 import LabelingDatasetConfiguration
+ from ._models_py3 import LabelingJob
+ from ._models_py3 import LabelingJobImageProperties
+ from ._models_py3 import LabelingJobInstructions
+ from ._models_py3 import LabelingJobMediaProperties
+ from ._models_py3 import LabelingJobResource
+ from ._models_py3 import LabelingJobResourceArmPaginatedResult
+ from ._models_py3 import LabelingJobTextProperties
+ from ._models_py3 import LinkedInfo
+ from ._models_py3 import ListAmlUserFeatureResult
+ from ._models_py3 import ListNotebookKeysResult
+ from ._models_py3 import ListStorageAccountKeysResult
+ from ._models_py3 import ListUsagesResult
+ from ._models_py3 import ListWorkspaceKeysResult
+ from ._models_py3 import ListWorkspaceQuotas
+ from ._models_py3 import ManagedIdentity
+ from ._models_py3 import ManagedOnlineDeployment
+ from ._models_py3 import ManualScaleSettings
+ from ._models_py3 import MedianStoppingPolicy
+ from ._models_py3 import MlAssistConfiguration
+ from ._models_py3 import ModelContainer
+ from ._models_py3 import ModelContainerResource
+ from ._models_py3 import ModelContainerResourceArmPaginatedResult
+ from ._models_py3 import ModelVersion
+ from ._models_py3 import ModelVersionResource
+ from ._models_py3 import ModelVersionResourceArmPaginatedResult
+ from ._models_py3 import Mpi
+ from ._models_py3 import NodeStateCounts
+ from ._models_py3 import NoneDatastoreCredentials
+ from ._models_py3 import NoneDatastoreSecrets
+ from ._models_py3 import NotebookAccessTokenResult
+ from ._models_py3 import NotebookPreparationError
+ from ._models_py3 import NotebookResourceInfo
+ from ._models_py3 import Objective
+ from ._models_py3 import OnlineDeployment
+ from ._models_py3 import OnlineDeploymentTrackedResource
+ from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult
+ from ._models_py3 import OnlineEndpoint
+ from ._models_py3 import OnlineEndpointTrackedResource
+ from ._models_py3 import OnlineEndpointTrackedResourceArmPaginatedResult
+ from ._models_py3 import OnlineRequestSettings
+ from ._models_py3 import OnlineScaleSettings
+ from ._models_py3 import Operation
+ from ._models_py3 import OperationDisplay
+ from ._models_py3 import OperationListResult
+ from ._models_py3 import OutputDataBinding
+ from ._models_py3 import OutputPathAssetReference
+ from ._models_py3 import PaginatedComputeResourcesList
+ from ._models_py3 import PaginatedWorkspaceConnectionsList
+ from ._models_py3 import PartialAksOnlineDeployment
+ from ._models_py3 import PartialBatchDeployment
+ from ._models_py3 import PartialBatchDeploymentPartialTrackedResource
+ from ._models_py3 import PartialBatchEndpoint
+ from ._models_py3 import PartialBatchEndpointPartialTrackedResource
+ from ._models_py3 import PartialManagedOnlineDeployment
+ from ._models_py3 import PartialOnlineDeployment
+ from ._models_py3 import PartialOnlineDeploymentPartialTrackedResource
+ from ._models_py3 import PartialOnlineEndpoint
+ from ._models_py3 import PartialOnlineEndpointPartialTrackedResource
+ from ._models_py3 import Password
+ from ._models_py3 import PersonalComputeInstanceSettings
+ from ._models_py3 import PrivateEndpoint
+ from ._models_py3 import PrivateEndpointConnection
+ from ._models_py3 import PrivateEndpointConnectionListResult
+ from ._models_py3 import PrivateLinkResource
+ from ._models_py3 import PrivateLinkResourceListResult
+ from ._models_py3 import PrivateLinkServiceConnectionState
+ from ._models_py3 import ProbeSettings
+ from ._models_py3 import ProgressMetrics
+ from ._models_py3 import PyTorch
+ from ._models_py3 import QuotaBaseProperties
+ from ._models_py3 import QuotaUpdateParameters
+ from ._models_py3 import Recurrence
+ from ._models_py3 import RecurrenceSchedule
+ from ._models_py3 import RegenerateEndpointKeysRequest
+ from ._models_py3 import RegistryListCredentialsResult
+ from ._models_py3 import Resource
+ from ._models_py3 import ResourceId
+ from ._models_py3 import ResourceIdentity
+ from ._models_py3 import ResourceName
+ from ._models_py3 import ResourceQuota
+ from ._models_py3 import ResourceSkuLocationInfo
+ from ._models_py3 import ResourceSkuZoneDetails
+ from ._models_py3 import Restriction
+ from ._models_py3 import Route
+ from ._models_py3 import SasDatastoreCredentials
+ from ._models_py3 import SasDatastoreSecrets
+ from ._models_py3 import ScaleSettings
+ from ._models_py3 import ScriptReference
+ from ._models_py3 import ScriptsToExecute
+ from ._models_py3 import ServiceManagedResourcesSettings
+ from ._models_py3 import ServicePrincipalCredentials
+ from ._models_py3 import ServicePrincipalDatastoreCredentials
+ from ._models_py3 import ServicePrincipalDatastoreSecrets
+ from ._models_py3 import SetupScripts
+ from ._models_py3 import SharedPrivateLinkResource
+ from ._models_py3 import Sku
+ from ._models_py3 import SkuCapability
+ from ._models_py3 import SkuListResult
+ from ._models_py3 import SqlAdminDatastoreCredentials
+ from ._models_py3 import SqlAdminDatastoreSecrets
+ from ._models_py3 import SslConfiguration
+ from ._models_py3 import StatusMessage
+ from ._models_py3 import SweepJob
+ from ._models_py3 import SynapseSpark
+ from ._models_py3 import SynapseSparkPoolProperties
+ from ._models_py3 import SynapseSparkPoolPropertiesautogenerated
+ from ._models_py3 import SystemData
+ from ._models_py3 import SystemService
+ from ._models_py3 import TensorFlow
+ from ._models_py3 import TrackedResource
+ from ._models_py3 import TrialComponent
+ from ._models_py3 import TruncationSelectionPolicy
+ from ._models_py3 import UpdateWorkspaceQuotas
+ from ._models_py3 import UpdateWorkspaceQuotasResult
+ from ._models_py3 import Usage
+ from ._models_py3 import UsageName
+ from ._models_py3 import UserAccountCredentials
+ from ._models_py3 import UserAssignedIdentity
+ from ._models_py3 import UserAssignedIdentityMeta
+ from ._models_py3 import VirtualMachine
+ from ._models_py3 import VirtualMachineImage
+ from ._models_py3 import VirtualMachineProperties
+ from ._models_py3 import VirtualMachineSecrets
+ from ._models_py3 import VirtualMachineSize
+ from ._models_py3 import VirtualMachineSizeListResult
+ from ._models_py3 import VirtualMachineSshCredentials
+ from ._models_py3 import Workspace
+ from ._models_py3 import WorkspaceConnection
+ from ._models_py3 import WorkspaceListResult
+ from ._models_py3 import WorkspaceSku
+ from ._models_py3 import WorkspaceUpdateParameters
+except (SyntaxError, ImportError):
+ from ._models import AccountKeyDatastoreCredentials # type: ignore
+ from ._models import AccountKeyDatastoreSecrets # type: ignore
+ from ._models import Aks # type: ignore
+ from ._models import AksComputeSecrets # type: ignore
+ from ._models import AksNetworkingConfiguration # type: ignore
+ from ._models import AksProperties # type: ignore
+ from ._models import AmlCompute # type: ignore
+ from ._models import AmlComputeNodeInformation # type: ignore
+ from ._models import AmlComputeNodesInformation # type: ignore
+ from ._models import AmlComputeProperties # type: ignore
+ from ._models import AmlToken # type: ignore
+ from ._models import AmlUserFeature # type: ignore
+ from ._models import AssetReferenceBase # type: ignore
+ from ._models import AssignedUser # type: ignore
+ from ._models import AutoPauseProperties # type: ignore
+ from ._models import AutoScaleProperties # type: ignore
+ from ._models import AutoScaleSettings # type: ignore
+ from ._models import AzureBlobContents # type: ignore
+ from ._models import AzureDataLakeGen1Contents # type: ignore
+ from ._models import AzureDataLakeGen2Contents # type: ignore
+ from ._models import AzureFileContents # type: ignore
+ from ._models import AzurePostgreSqlContents # type: ignore
+ from ._models import AzureSqlDatabaseContents # type: ignore
+ from ._models import BanditPolicy # type: ignore
+ from ._models import BatchDeployment # type: ignore
+ from ._models import BatchDeploymentTrackedResource # type: ignore
+ from ._models import BatchDeploymentTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import BatchEndpoint # type: ignore
+ from ._models import BatchEndpointTrackedResource # type: ignore
+ from ._models import BatchEndpointTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import BatchOutputConfiguration # type: ignore
+ from ._models import BatchRetrySettings # type: ignore
+ from ._models import CertificateDatastoreCredentials # type: ignore
+ from ._models import CertificateDatastoreSecrets # type: ignore
+ from ._models import ClusterUpdateParameters # type: ignore
+ from ._models import CocoExportSummary # type: ignore
+ from ._models import CodeConfiguration # type: ignore
+ from ._models import CodeContainer # type: ignore
+ from ._models import CodeContainerResource # type: ignore
+ from ._models import CodeContainerResourceArmPaginatedResult # type: ignore
+ from ._models import CodeVersion # type: ignore
+ from ._models import CodeVersionResource # type: ignore
+ from ._models import CodeVersionResourceArmPaginatedResult # type: ignore
+ from ._models import CommandJob # type: ignore
+ from ._models import Components1D3SwueSchemasComputeresourceAllof1 # type: ignore
+ from ._models import Compute # type: ignore
+ from ._models import ComputeConfiguration # type: ignore
+ from ._models import ComputeInstance # type: ignore
+ from ._models import ComputeInstanceApplication # type: ignore
+ from ._models import ComputeInstanceConnectivityEndpoints # type: ignore
+ from ._models import ComputeInstanceCreatedBy # type: ignore
+ from ._models import ComputeInstanceLastOperation # type: ignore
+ from ._models import ComputeInstanceProperties # type: ignore
+ from ._models import ComputeInstanceSshSettings # type: ignore
+ from ._models import ComputeNodesInformation # type: ignore
+ from ._models import ComputeResource # type: ignore
+ from ._models import ComputeSchedules # type: ignore
+ from ._models import ComputeSecrets # type: ignore
+ from ._models import ComputeStartStopSchedule # type: ignore
+ from ._models import ContainerResourceRequirements # type: ignore
+ from ._models import CosmosDbSettings # type: ignore
+ from ._models import Cron # type: ignore
+ from ._models import CsvExportSummary # type: ignore
+ from ._models import DataContainer # type: ignore
+ from ._models import DataContainerResource # type: ignore
+ from ._models import DataContainerResourceArmPaginatedResult # type: ignore
+ from ._models import DataFactory # type: ignore
+ from ._models import DataLakeAnalytics # type: ignore
+ from ._models import DataLakeAnalyticsProperties # type: ignore
+ from ._models import DataPathAssetReference # type: ignore
+ from ._models import DataVersion # type: ignore
+ from ._models import DataVersionResource # type: ignore
+ from ._models import DataVersionResourceArmPaginatedResult # type: ignore
+ from ._models import Databricks # type: ignore
+ from ._models import DatabricksComputeSecrets # type: ignore
+ from ._models import DatabricksProperties # type: ignore
+ from ._models import DatasetExportSummary # type: ignore
+ from ._models import DatastoreContents # type: ignore
+ from ._models import DatastoreCredentials # type: ignore
+ from ._models import DatastoreProperties # type: ignore
+ from ._models import DatastorePropertiesResource # type: ignore
+ from ._models import DatastorePropertiesResourceArmPaginatedResult # type: ignore
+ from ._models import DatastoreSecrets # type: ignore
+ from ._models import DeploymentLogs # type: ignore
+ from ._models import DeploymentLogsRequest # type: ignore
+ from ._models import DistributionConfiguration # type: ignore
+ from ._models import DockerBuild # type: ignore
+ from ._models import DockerImage # type: ignore
+ from ._models import DockerImagePlatform # type: ignore
+ from ._models import DockerSpecification # type: ignore
+ from ._models import EarlyTerminationPolicy # type: ignore
+ from ._models import EncryptionProperty # type: ignore
+ from ._models import EndpointAuthKeys # type: ignore
+ from ._models import EndpointAuthToken # type: ignore
+ from ._models import EnvironmentContainer # type: ignore
+ from ._models import EnvironmentContainerResource # type: ignore
+ from ._models import EnvironmentContainerResourceArmPaginatedResult # type: ignore
+ from ._models import EnvironmentSpecificationVersion # type: ignore
+ from ._models import EnvironmentSpecificationVersionResource # type: ignore
+ from ._models import EnvironmentSpecificationVersionResourceArmPaginatedResult # type: ignore
+ from ._models import ErrorAdditionalInfo # type: ignore
+ from ._models import ErrorDetail # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import EstimatedVmPrice # type: ignore
+ from ._models import EstimatedVmPrices # type: ignore
+ from ._models import ExportSummary # type: ignore
+ from ._models import FlavorData # type: ignore
+ from ._models import GlusterFsContents # type: ignore
+ from ._models import HdInsight # type: ignore
+ from ._models import HdInsightProperties # type: ignore
+ from ._models import IdAssetReference # type: ignore
+ from ._models import Identity # type: ignore
+ from ._models import IdentityConfiguration # type: ignore
+ from ._models import IdentityForCmk # type: ignore
+ from ._models import InferenceContainerProperties # type: ignore
+ from ._models import InputDataBinding # type: ignore
+ from ._models import JobBase # type: ignore
+ from ._models import JobBaseResource # type: ignore
+ from ._models import JobBaseResourceArmPaginatedResult # type: ignore
+ from ._models import JobEndpoint # type: ignore
+ from ._models import JobOutput # type: ignore
+ from ._models import K8SOnlineDeployment # type: ignore
+ from ._models import KeyVaultProperties # type: ignore
+ from ._models import LabelCategory # type: ignore
+ from ._models import LabelClass # type: ignore
+ from ._models import LabelingDatasetConfiguration # type: ignore
+ from ._models import LabelingJob # type: ignore
+ from ._models import LabelingJobImageProperties # type: ignore
+ from ._models import LabelingJobInstructions # type: ignore
+ from ._models import LabelingJobMediaProperties # type: ignore
+ from ._models import LabelingJobResource # type: ignore
+ from ._models import LabelingJobResourceArmPaginatedResult # type: ignore
+ from ._models import LabelingJobTextProperties # type: ignore
+ from ._models import LinkedInfo # type: ignore
+ from ._models import ListAmlUserFeatureResult # type: ignore
+ from ._models import ListNotebookKeysResult # type: ignore
+ from ._models import ListStorageAccountKeysResult # type: ignore
+ from ._models import ListUsagesResult # type: ignore
+ from ._models import ListWorkspaceKeysResult # type: ignore
+ from ._models import ListWorkspaceQuotas # type: ignore
+ from ._models import ManagedIdentity # type: ignore
+ from ._models import ManagedOnlineDeployment # type: ignore
+ from ._models import ManualScaleSettings # type: ignore
+ from ._models import MedianStoppingPolicy # type: ignore
+ from ._models import MlAssistConfiguration # type: ignore
+ from ._models import ModelContainer # type: ignore
+ from ._models import ModelContainerResource # type: ignore
+ from ._models import ModelContainerResourceArmPaginatedResult # type: ignore
+ from ._models import ModelVersion # type: ignore
+ from ._models import ModelVersionResource # type: ignore
+ from ._models import ModelVersionResourceArmPaginatedResult # type: ignore
+ from ._models import Mpi # type: ignore
+ from ._models import NodeStateCounts # type: ignore
+ from ._models import NoneDatastoreCredentials # type: ignore
+ from ._models import NoneDatastoreSecrets # type: ignore
+ from ._models import NotebookAccessTokenResult # type: ignore
+ from ._models import NotebookPreparationError # type: ignore
+ from ._models import NotebookResourceInfo # type: ignore
+ from ._models import Objective # type: ignore
+ from ._models import OnlineDeployment # type: ignore
+ from ._models import OnlineDeploymentTrackedResource # type: ignore
+ from ._models import OnlineDeploymentTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import OnlineEndpoint # type: ignore
+ from ._models import OnlineEndpointTrackedResource # type: ignore
+ from ._models import OnlineEndpointTrackedResourceArmPaginatedResult # type: ignore
+ from ._models import OnlineRequestSettings # type: ignore
+ from ._models import OnlineScaleSettings # type: ignore
+ from ._models import Operation # type: ignore
+ from ._models import OperationDisplay # type: ignore
+ from ._models import OperationListResult # type: ignore
+ from ._models import OutputDataBinding # type: ignore
+ from ._models import OutputPathAssetReference # type: ignore
+ from ._models import PaginatedComputeResourcesList # type: ignore
+ from ._models import PaginatedWorkspaceConnectionsList # type: ignore
+ from ._models import PartialAksOnlineDeployment # type: ignore
+ from ._models import PartialBatchDeployment # type: ignore
+ from ._models import PartialBatchDeploymentPartialTrackedResource # type: ignore
+ from ._models import PartialBatchEndpoint # type: ignore
+ from ._models import PartialBatchEndpointPartialTrackedResource # type: ignore
+ from ._models import PartialManagedOnlineDeployment # type: ignore
+ from ._models import PartialOnlineDeployment # type: ignore
+ from ._models import PartialOnlineDeploymentPartialTrackedResource # type: ignore
+ from ._models import PartialOnlineEndpoint # type: ignore
+ from ._models import PartialOnlineEndpointPartialTrackedResource # type: ignore
+ from ._models import Password # type: ignore
+ from ._models import PersonalComputeInstanceSettings # type: ignore
+ from ._models import PrivateEndpoint # type: ignore
+ from ._models import PrivateEndpointConnection # type: ignore
+ from ._models import PrivateEndpointConnectionListResult # type: ignore
+ from ._models import PrivateLinkResource # type: ignore
+ from ._models import PrivateLinkResourceListResult # type: ignore
+ from ._models import PrivateLinkServiceConnectionState # type: ignore
+ from ._models import ProbeSettings # type: ignore
+ from ._models import ProgressMetrics # type: ignore
+ from ._models import PyTorch # type: ignore
+ from ._models import QuotaBaseProperties # type: ignore
+ from ._models import QuotaUpdateParameters # type: ignore
+ from ._models import Recurrence # type: ignore
+ from ._models import RecurrenceSchedule # type: ignore
+ from ._models import RegenerateEndpointKeysRequest # type: ignore
+ from ._models import RegistryListCredentialsResult # type: ignore
+ from ._models import Resource # type: ignore
+ from ._models import ResourceId # type: ignore
+ from ._models import ResourceIdentity # type: ignore
+ from ._models import ResourceName # type: ignore
+ from ._models import ResourceQuota # type: ignore
+ from ._models import ResourceSkuLocationInfo # type: ignore
+ from ._models import ResourceSkuZoneDetails # type: ignore
+ from ._models import Restriction # type: ignore
+ from ._models import Route # type: ignore
+ from ._models import SasDatastoreCredentials # type: ignore
+ from ._models import SasDatastoreSecrets # type: ignore
+ from ._models import ScaleSettings # type: ignore
+ from ._models import ScriptReference # type: ignore
+ from ._models import ScriptsToExecute # type: ignore
+ from ._models import ServiceManagedResourcesSettings # type: ignore
+ from ._models import ServicePrincipalCredentials # type: ignore
+ from ._models import ServicePrincipalDatastoreCredentials # type: ignore
+ from ._models import ServicePrincipalDatastoreSecrets # type: ignore
+ from ._models import SetupScripts # type: ignore
+ from ._models import SharedPrivateLinkResource # type: ignore
+ from ._models import Sku # type: ignore
+ from ._models import SkuCapability # type: ignore
+ from ._models import SkuListResult # type: ignore
+ from ._models import SqlAdminDatastoreCredentials # type: ignore
+ from ._models import SqlAdminDatastoreSecrets # type: ignore
+ from ._models import SslConfiguration # type: ignore
+ from ._models import StatusMessage # type: ignore
+ from ._models import SweepJob # type: ignore
+ from ._models import SynapseSpark # type: ignore
+ from ._models import SynapseSparkPoolProperties # type: ignore
+ from ._models import SynapseSparkPoolPropertiesautogenerated # type: ignore
+ from ._models import SystemData # type: ignore
+ from ._models import SystemService # type: ignore
+ from ._models import TensorFlow # type: ignore
+ from ._models import TrackedResource # type: ignore
+ from ._models import TrialComponent # type: ignore
+ from ._models import TruncationSelectionPolicy # type: ignore
+ from ._models import UpdateWorkspaceQuotas # type: ignore
+ from ._models import UpdateWorkspaceQuotasResult # type: ignore
+ from ._models import Usage # type: ignore
+ from ._models import UsageName # type: ignore
+ from ._models import UserAccountCredentials # type: ignore
+ from ._models import UserAssignedIdentity # type: ignore
+ from ._models import UserAssignedIdentityMeta # type: ignore
+ from ._models import VirtualMachine # type: ignore
+ from ._models import VirtualMachineImage # type: ignore
+ from ._models import VirtualMachineProperties # type: ignore
+ from ._models import VirtualMachineSecrets # type: ignore
+ from ._models import VirtualMachineSize # type: ignore
+ from ._models import VirtualMachineSizeListResult # type: ignore
+ from ._models import VirtualMachineSshCredentials # type: ignore
+ from ._models import Workspace # type: ignore
+ from ._models import WorkspaceConnection # type: ignore
+ from ._models import WorkspaceListResult # type: ignore
+ from ._models import WorkspaceSku # type: ignore
+ from ._models import WorkspaceUpdateParameters # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+ AllocationState,
+ ApplicationSharingPolicy,
+ BatchLoggingLevel,
+ BatchOutputAction,
+ BillingCurrency,
+ ClusterPurpose,
+ ComputeInstanceAuthorizationType,
+ ComputeInstanceState,
+ ComputePowerAction,
+ ComputeType,
+ ContainerType,
+ ContentsType,
+ CreatedByType,
+ CredentialsType,
+ DataBindingMode,
+ DatasetType,
+ DaysOfWeek,
+ DeploymentProvisioningState,
+ DistributionType,
+ DockerSpecificationType,
+ EarlyTerminationPolicyType,
+ EncryptionStatus,
+ EndpointAuthMode,
+ EndpointComputeType,
+ EndpointProvisioningState,
+ EnvironmentSpecificationType,
+ ExportFormatType,
+ Goal,
+ IdentityConfigurationType,
+ ImageAnnotationType,
+ JobProvisioningState,
+ JobStatus,
+ JobType,
+ KeyType,
+ LoadBalancerType,
+ MediaType,
+ NodeState,
+ OperatingSystemType,
+ OperationName,
+ OperationStatus,
+ OrderString,
+ OriginType,
+ OsType,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProvisioningState,
+ ProvisioningStatus,
+ QuotaUnit,
+ ReasonCode,
+ RecurrenceFrequency,
+ ReferenceType,
+ RemoteLoginPortPublicAccess,
+ ResourceIdentityAssignment,
+ ResourceIdentityType,
+ SamplingAlgorithm,
+ ScaleType,
+ ScheduleStatus,
+ ScheduleType,
+ SecretsType,
+ SshPublicAccess,
+ SslConfigurationStatus,
+ Status,
+ StatusMessageLevel,
+ TextAnnotationType,
+ TriggerType,
+ UnderlyingResourceAction,
+ UnitOfMeasure,
+ UsageUnit,
+ ValueFormat,
+ VmPriceOsType,
+ VmPriority,
+ VmTier,
+)
+
+__all__ = [
+ 'AccountKeyDatastoreCredentials',
+ 'AccountKeyDatastoreSecrets',
+ 'Aks',
+ 'AksComputeSecrets',
+ 'AksNetworkingConfiguration',
+ 'AksProperties',
+ 'AmlCompute',
+ 'AmlComputeNodeInformation',
+ 'AmlComputeNodesInformation',
+ 'AmlComputeProperties',
+ 'AmlToken',
+ 'AmlUserFeature',
+ 'AssetReferenceBase',
+ 'AssignedUser',
+ 'AutoPauseProperties',
+ 'AutoScaleProperties',
+ 'AutoScaleSettings',
+ 'AzureBlobContents',
+ 'AzureDataLakeGen1Contents',
+ 'AzureDataLakeGen2Contents',
+ 'AzureFileContents',
+ 'AzurePostgreSqlContents',
+ 'AzureSqlDatabaseContents',
+ 'BanditPolicy',
+ 'BatchDeployment',
+ 'BatchDeploymentTrackedResource',
+ 'BatchDeploymentTrackedResourceArmPaginatedResult',
+ 'BatchEndpoint',
+ 'BatchEndpointTrackedResource',
+ 'BatchEndpointTrackedResourceArmPaginatedResult',
+ 'BatchOutputConfiguration',
+ 'BatchRetrySettings',
+ 'CertificateDatastoreCredentials',
+ 'CertificateDatastoreSecrets',
+ 'ClusterUpdateParameters',
+ 'CocoExportSummary',
+ 'CodeConfiguration',
+ 'CodeContainer',
+ 'CodeContainerResource',
+ 'CodeContainerResourceArmPaginatedResult',
+ 'CodeVersion',
+ 'CodeVersionResource',
+ 'CodeVersionResourceArmPaginatedResult',
+ 'CommandJob',
+ 'Components1D3SwueSchemasComputeresourceAllof1',
+ 'Compute',
+ 'ComputeConfiguration',
+ 'ComputeInstance',
+ 'ComputeInstanceApplication',
+ 'ComputeInstanceConnectivityEndpoints',
+ 'ComputeInstanceCreatedBy',
+ 'ComputeInstanceLastOperation',
+ 'ComputeInstanceProperties',
+ 'ComputeInstanceSshSettings',
+ 'ComputeNodesInformation',
+ 'ComputeResource',
+ 'ComputeSchedules',
+ 'ComputeSecrets',
+ 'ComputeStartStopSchedule',
+ 'ContainerResourceRequirements',
+ 'CosmosDbSettings',
+ 'Cron',
+ 'CsvExportSummary',
+ 'DataContainer',
+ 'DataContainerResource',
+ 'DataContainerResourceArmPaginatedResult',
+ 'DataFactory',
+ 'DataLakeAnalytics',
+ 'DataLakeAnalyticsProperties',
+ 'DataPathAssetReference',
+ 'DataVersion',
+ 'DataVersionResource',
+ 'DataVersionResourceArmPaginatedResult',
+ 'Databricks',
+ 'DatabricksComputeSecrets',
+ 'DatabricksProperties',
+ 'DatasetExportSummary',
+ 'DatastoreContents',
+ 'DatastoreCredentials',
+ 'DatastoreProperties',
+ 'DatastorePropertiesResource',
+ 'DatastorePropertiesResourceArmPaginatedResult',
+ 'DatastoreSecrets',
+ 'DeploymentLogs',
+ 'DeploymentLogsRequest',
+ 'DistributionConfiguration',
+ 'DockerBuild',
+ 'DockerImage',
+ 'DockerImagePlatform',
+ 'DockerSpecification',
+ 'EarlyTerminationPolicy',
+ 'EncryptionProperty',
+ 'EndpointAuthKeys',
+ 'EndpointAuthToken',
+ 'EnvironmentContainer',
+ 'EnvironmentContainerResource',
+ 'EnvironmentContainerResourceArmPaginatedResult',
+ 'EnvironmentSpecificationVersion',
+ 'EnvironmentSpecificationVersionResource',
+ 'EnvironmentSpecificationVersionResourceArmPaginatedResult',
+ 'ErrorAdditionalInfo',
+ 'ErrorDetail',
+ 'ErrorResponse',
+ 'EstimatedVmPrice',
+ 'EstimatedVmPrices',
+ 'ExportSummary',
+ 'FlavorData',
+ 'GlusterFsContents',
+ 'HdInsight',
+ 'HdInsightProperties',
+ 'IdAssetReference',
+ 'Identity',
+ 'IdentityConfiguration',
+ 'IdentityForCmk',
+ 'InferenceContainerProperties',
+ 'InputDataBinding',
+ 'JobBase',
+ 'JobBaseResource',
+ 'JobBaseResourceArmPaginatedResult',
+ 'JobEndpoint',
+ 'JobOutput',
+ 'K8SOnlineDeployment',
+ 'KeyVaultProperties',
+ 'LabelCategory',
+ 'LabelClass',
+ 'LabelingDatasetConfiguration',
+ 'LabelingJob',
+ 'LabelingJobImageProperties',
+ 'LabelingJobInstructions',
+ 'LabelingJobMediaProperties',
+ 'LabelingJobResource',
+ 'LabelingJobResourceArmPaginatedResult',
+ 'LabelingJobTextProperties',
+ 'LinkedInfo',
+ 'ListAmlUserFeatureResult',
+ 'ListNotebookKeysResult',
+ 'ListStorageAccountKeysResult',
+ 'ListUsagesResult',
+ 'ListWorkspaceKeysResult',
+ 'ListWorkspaceQuotas',
+ 'ManagedIdentity',
+ 'ManagedOnlineDeployment',
+ 'ManualScaleSettings',
+ 'MedianStoppingPolicy',
+ 'MlAssistConfiguration',
+ 'ModelContainer',
+ 'ModelContainerResource',
+ 'ModelContainerResourceArmPaginatedResult',
+ 'ModelVersion',
+ 'ModelVersionResource',
+ 'ModelVersionResourceArmPaginatedResult',
+ 'Mpi',
+ 'NodeStateCounts',
+ 'NoneDatastoreCredentials',
+ 'NoneDatastoreSecrets',
+ 'NotebookAccessTokenResult',
+ 'NotebookPreparationError',
+ 'NotebookResourceInfo',
+ 'Objective',
+ 'OnlineDeployment',
+ 'OnlineDeploymentTrackedResource',
+ 'OnlineDeploymentTrackedResourceArmPaginatedResult',
+ 'OnlineEndpoint',
+ 'OnlineEndpointTrackedResource',
+ 'OnlineEndpointTrackedResourceArmPaginatedResult',
+ 'OnlineRequestSettings',
+ 'OnlineScaleSettings',
+ 'Operation',
+ 'OperationDisplay',
+ 'OperationListResult',
+ 'OutputDataBinding',
+ 'OutputPathAssetReference',
+ 'PaginatedComputeResourcesList',
+ 'PaginatedWorkspaceConnectionsList',
+ 'PartialAksOnlineDeployment',
+ 'PartialBatchDeployment',
+ 'PartialBatchDeploymentPartialTrackedResource',
+ 'PartialBatchEndpoint',
+ 'PartialBatchEndpointPartialTrackedResource',
+ 'PartialManagedOnlineDeployment',
+ 'PartialOnlineDeployment',
+ 'PartialOnlineDeploymentPartialTrackedResource',
+ 'PartialOnlineEndpoint',
+ 'PartialOnlineEndpointPartialTrackedResource',
+ 'Password',
+ 'PersonalComputeInstanceSettings',
+ 'PrivateEndpoint',
+ 'PrivateEndpointConnection',
+ 'PrivateEndpointConnectionListResult',
+ 'PrivateLinkResource',
+ 'PrivateLinkResourceListResult',
+ 'PrivateLinkServiceConnectionState',
+ 'ProbeSettings',
+ 'ProgressMetrics',
+ 'PyTorch',
+ 'QuotaBaseProperties',
+ 'QuotaUpdateParameters',
+ 'Recurrence',
+ 'RecurrenceSchedule',
+ 'RegenerateEndpointKeysRequest',
+ 'RegistryListCredentialsResult',
+ 'Resource',
+ 'ResourceId',
+ 'ResourceIdentity',
+ 'ResourceName',
+ 'ResourceQuota',
+ 'ResourceSkuLocationInfo',
+ 'ResourceSkuZoneDetails',
+ 'Restriction',
+ 'Route',
+ 'SasDatastoreCredentials',
+ 'SasDatastoreSecrets',
+ 'ScaleSettings',
+ 'ScriptReference',
+ 'ScriptsToExecute',
+ 'ServiceManagedResourcesSettings',
+ 'ServicePrincipalCredentials',
+ 'ServicePrincipalDatastoreCredentials',
+ 'ServicePrincipalDatastoreSecrets',
+ 'SetupScripts',
+ 'SharedPrivateLinkResource',
+ 'Sku',
+ 'SkuCapability',
+ 'SkuListResult',
+ 'SqlAdminDatastoreCredentials',
+ 'SqlAdminDatastoreSecrets',
+ 'SslConfiguration',
+ 'StatusMessage',
+ 'SweepJob',
+ 'SynapseSpark',
+ 'SynapseSparkPoolProperties',
+ 'SynapseSparkPoolPropertiesautogenerated',
+ 'SystemData',
+ 'SystemService',
+ 'TensorFlow',
+ 'TrackedResource',
+ 'TrialComponent',
+ 'TruncationSelectionPolicy',
+ 'UpdateWorkspaceQuotas',
+ 'UpdateWorkspaceQuotasResult',
+ 'Usage',
+ 'UsageName',
+ 'UserAccountCredentials',
+ 'UserAssignedIdentity',
+ 'UserAssignedIdentityMeta',
+ 'VirtualMachine',
+ 'VirtualMachineImage',
+ 'VirtualMachineProperties',
+ 'VirtualMachineSecrets',
+ 'VirtualMachineSize',
+ 'VirtualMachineSizeListResult',
+ 'VirtualMachineSshCredentials',
+ 'Workspace',
+ 'WorkspaceConnection',
+ 'WorkspaceListResult',
+ 'WorkspaceSku',
+ 'WorkspaceUpdateParameters',
+ 'AllocationState',
+ 'ApplicationSharingPolicy',
+ 'BatchLoggingLevel',
+ 'BatchOutputAction',
+ 'BillingCurrency',
+ 'ClusterPurpose',
+ 'ComputeInstanceAuthorizationType',
+ 'ComputeInstanceState',
+ 'ComputePowerAction',
+ 'ComputeType',
+ 'ContainerType',
+ 'ContentsType',
+ 'CreatedByType',
+ 'CredentialsType',
+ 'DataBindingMode',
+ 'DatasetType',
+ 'DaysOfWeek',
+ 'DeploymentProvisioningState',
+ 'DistributionType',
+ 'DockerSpecificationType',
+ 'EarlyTerminationPolicyType',
+ 'EncryptionStatus',
+ 'EndpointAuthMode',
+ 'EndpointComputeType',
+ 'EndpointProvisioningState',
+ 'EnvironmentSpecificationType',
+ 'ExportFormatType',
+ 'Goal',
+ 'IdentityConfigurationType',
+ 'ImageAnnotationType',
+ 'JobProvisioningState',
+ 'JobStatus',
+ 'JobType',
+ 'KeyType',
+ 'LoadBalancerType',
+ 'MediaType',
+ 'NodeState',
+ 'OperatingSystemType',
+ 'OperationName',
+ 'OperationStatus',
+ 'OrderString',
+ 'OriginType',
+ 'OsType',
+ 'PrivateEndpointConnectionProvisioningState',
+ 'PrivateEndpointServiceConnectionStatus',
+ 'ProvisioningState',
+ 'ProvisioningStatus',
+ 'QuotaUnit',
+ 'ReasonCode',
+ 'RecurrenceFrequency',
+ 'ReferenceType',
+ 'RemoteLoginPortPublicAccess',
+ 'ResourceIdentityAssignment',
+ 'ResourceIdentityType',
+ 'SamplingAlgorithm',
+ 'ScaleType',
+ 'ScheduleStatus',
+ 'ScheduleType',
+ 'SecretsType',
+ 'SshPublicAccess',
+ 'SslConfigurationStatus',
+ 'Status',
+ 'StatusMessageLevel',
+ 'TextAnnotationType',
+ 'TriggerType',
+ 'UnderlyingResourceAction',
+ 'UnitOfMeasure',
+ 'UsageUnit',
+ 'ValueFormat',
+ 'VmPriceOsType',
+ 'VmPriority',
+ 'VmTier',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000000..1e2d79f43da
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,637 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+    def __getitem__(self, name):
+        return super().__getitem__(name.upper())
+
+    def __getattr__(cls, name):
+        """Return the enum member matching `name`, ignoring case.
+        We use __getattr__ instead of descriptors or inserting into the enum
+        class' __dict__ in order to support `name` and `value` being both
+        properties for enum members (which live in the class' __dict__) and
+        enum members themselves.
+        """
+        try:
+            return cls._member_map_[name.upper()]
+        except KeyError:
+            raise AttributeError(name)
+
+
+class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Allocation state of the compute. Possible values are: steady - Indicates that the compute is
+ not resizing. There are no changes to the number of compute nodes in the compute in progress. A
+ compute enters this state when it is created and when no operations are being performed on the
+ compute to change the number of compute nodes. resizing - Indicates that the compute is
+ resizing; that is, compute nodes are being added to or removed from the compute.
+ """
+
+ STEADY = "Steady"
+ RESIZING = "Resizing"
+
+class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Policy for sharing applications on this compute instance among users of parent workspace. If
+    Personal, only the creator can access applications on this compute instance. When Shared, any
+    workspace user can access applications on this instance depending on their assigned role.
+    """
+
+    PERSONAL = "Personal"
+    SHARED = "Shared"
+
+class BatchLoggingLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Log verbosity for batch inferencing.
+ Increasing verbosity order for logging is : Warning, Info and Debug.
+ The default value is Info.
+ """
+
+ INFO = "Info"
+ WARNING = "Warning"
+ DEBUG = "Debug"
+
+class BatchOutputAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Enum to determine how batch inferencing will handle output.
+    """
+
+    SUMMARY_ONLY = "SummaryOnly"
+    APPEND_ROW = "AppendRow"
+
+class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Three-letter code specifying the currency of the VM price. Example: USD.
+    """
+
+    USD = "USD"
+
+class ClusterPurpose(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Intended usage of the cluster.
+    """
+
+    FAST_PROD = "FastProd"
+    DENSE_PROD = "DenseProd"
+    DEV_TEST = "DevTest"
+
+class ComputeInstanceAuthorizationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The Compute Instance Authorization type. Available values are personal (default).
+ """
+
+ PERSONAL = "personal"
+
+class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Current state of a ComputeInstance.
+    """
+
+    CREATING = "Creating"
+    CREATE_FAILED = "CreateFailed"
+    DELETING = "Deleting"
+    RUNNING = "Running"
+    RESTARTING = "Restarting"
+    JOB_RUNNING = "JobRunning"
+    SETTING_UP = "SettingUp"
+    SETUP_FAILED = "SetupFailed"
+    STARTING = "Starting"
+    STOPPED = "Stopped"
+    STOPPING = "Stopping"
+    USER_SETTING_UP = "UserSettingUp"
+    USER_SETUP_FAILED = "UserSetupFailed"
+    UNKNOWN = "Unknown"
+    UNUSABLE = "Unusable"
+
+class ComputePowerAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The compute power action.
+ """
+
+ START = "Start"
+ STOP = "Stop"
+
+class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The type of compute.
+    """
+
+    AKS = "AKS"
+    AML_COMPUTE = "AmlCompute"
+    COMPUTE_INSTANCE = "ComputeInstance"
+    DATA_FACTORY = "DataFactory"
+    VIRTUAL_MACHINE = "VirtualMachine"
+    HD_INSIGHT = "HDInsight"
+    DATABRICKS = "Databricks"
+    DATA_LAKE_ANALYTICS = "DataLakeAnalytics"
+    SYNAPSE_SPARK = "SynapseSpark"
+
+class ContainerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ STORAGE_INITIALIZER = "StorageInitializer"
+ INFERENCE_SERVER = "InferenceServer"
+
+class ContentsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the datastore contents type.
+ """
+
+ AZURE_BLOB = "AzureBlob"
+ AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1"
+ AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
+ AZURE_FILE = "AzureFile"
+ AZURE_MY_SQL = "AzureMySql"
+ AZURE_POSTGRE_SQL = "AzurePostgreSql"
+ AZURE_SQL_DATABASE = "AzureSqlDatabase"
+ GLUSTER_FS = "GlusterFs"
+
+class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that created the resource.
+ """
+
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
+
+class CredentialsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the datastore credentials type.
+ """
+
+ ACCOUNT_KEY = "AccountKey"
+ CERTIFICATE = "Certificate"
+ NONE = "None"
+ SAS = "Sas"
+ SERVICE_PRINCIPAL = "ServicePrincipal"
+ SQL_ADMIN = "SqlAdmin"
+
+class DataBindingMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Describes how the data should be attached to the container.
+ """
+
+ MOUNT = "Mount"
+ DOWNLOAD = "Download"
+ UPLOAD = "Upload"
+
+class DatasetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ SIMPLE = "Simple"
+ DATAFLOW = "Dataflow"
+
+class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ SUNDAY = "Sunday"
+ MONDAY = "Monday"
+ TUESDAY = "Tuesday"
+ WEDNESDAY = "Wednesday"
+ THURSDAY = "Thursday"
+ FRIDAY = "Friday"
+ SATURDAY = "Saturday"
+
+class DeploymentProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SCALING = "Scaling"
+ UPDATING = "Updating"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class DistributionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the job distribution type.
+ """
+
+ PY_TORCH = "PyTorch"
+ TENSOR_FLOW = "TensorFlow"
+ MPI = "Mpi"
+
+class DockerSpecificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine docker specification type. Must be either Build or Image.
+ """
+
+ BUILD = "Build"
+ IMAGE = "Image"
+
+class EarlyTerminationPolicyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ BANDIT = "Bandit"
+ MEDIAN_STOPPING = "MedianStopping"
+ TRUNCATION_SELECTION = "TruncationSelection"
+
+class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Indicates whether or not the encryption is enabled for the workspace.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class EndpointAuthMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine endpoint authentication mode.
+ """
+
+ AML_TOKEN = "AMLToken"
+ KEY = "Key"
+ AAD_TOKEN = "AADToken"
+
+class EndpointComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ MANAGED = "Managed"
+ K8_S = "K8S"
+ AZURE_ML_COMPUTE = "AzureMLCompute"
+
+class EndpointProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of endpoint provisioning.
+ """
+
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ UPDATING = "Updating"
+ CANCELED = "Canceled"
+
+class EnvironmentSpecificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Environment specification is either user-created or curated by the Azure ML service.
+    """
+
+    CURATED = "Curated"
+    USER_CREATED = "UserCreated"
+
+class ExportFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The format of exported labels.
+ """
+
+ DATASET = "Dataset"
+ COCO = "Coco"
+ CSV = "CSV"
+
+class Goal(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Defines supported metric goals for hyperparameter tuning.
+    """
+
+    MINIMIZE = "Minimize"
+    MAXIMIZE = "Maximize"
+
+class IdentityConfigurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine identity framework.
+ """
+
+ MANAGED = "Managed"
+ AML_TOKEN = "AMLToken"
+
+class ImageAnnotationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Annotation type of image data.
+ """
+
+ CLASSIFICATION = "Classification"
+ BOUNDING_BOX = "BoundingBox"
+ INSTANCE_SEGMENTATION = "InstanceSegmentation"
+
+class JobProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+ IN_PROGRESS = "InProgress"
+
+class JobStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The status of a job.
+ """
+
+ NOT_STARTED = "NotStarted"
+ STARTING = "Starting"
+ PROVISIONING = "Provisioning"
+ PREPARING = "Preparing"
+ QUEUED = "Queued"
+ RUNNING = "Running"
+ FINALIZING = "Finalizing"
+ CANCEL_REQUESTED = "CancelRequested"
+ COMPLETED = "Completed"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+ NOT_RESPONDING = "NotResponding"
+ PAUSED = "Paused"
+ UNKNOWN = "Unknown"
+
+class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the type of job.
+ """
+
+ COMMAND = "Command"
+ SWEEP = "Sweep"
+ LABELING = "Labeling"
+
+class KeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ PRIMARY = "Primary"
+ SECONDARY = "Secondary"
+
+class LoadBalancerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Load balancer type.
+    """
+
+    PUBLIC_IP = "PublicIp"
+    INTERNAL_LOAD_BALANCER = "InternalLoadBalancer"
+
+class MediaType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Media type of data asset.
+ """
+
+ IMAGE = "Image"
+ TEXT = "Text"
+
+class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the compute node. Values are idle, running, preparing, unusable, leaving and
+ preempted.
+ """
+
+ IDLE = "idle"
+ RUNNING = "running"
+ PREPARING = "preparing"
+ UNUSABLE = "unusable"
+ LEAVING = "leaving"
+ PREEMPTED = "preempted"
+
+class OperatingSystemType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of operating system.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Name of the last operation."""
+
+ CREATE = "Create"
+ START = "Start"
+ STOP = "Stop"
+ RESTART = "Restart"
+ REIMAGE = "Reimage"
+ DELETE = "Delete"
+
+class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operation status."""
+
+ IN_PROGRESS = "InProgress"
+ SUCCEEDED = "Succeeded"
+ CREATE_FAILED = "CreateFailed"
+ START_FAILED = "StartFailed"
+ STOP_FAILED = "StopFailed"
+ RESTART_FAILED = "RestartFailed"
+ REIMAGE_FAILED = "ReimageFailed"
+ DELETE_FAILED = "DeleteFailed"
+
+class OrderString(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Ordering of list results: by created/updated time, ascending or descending."""
+
+ CREATED_AT_DESC = "CreatedAtDesc"
+ CREATED_AT_ASC = "CreatedAtAsc"
+ UPDATED_AT_DESC = "UpdatedAtDesc"
+ UPDATED_AT_ASC = "UpdatedAtAsc"
+
+class OriginType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the type of linked service."""
+
+ SYNAPSE = "Synapse"
+
+class OsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Compute OS Type."""
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current provisioning state."""
+
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ FAILED = "Failed"
+
+class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The private endpoint connection status."""
+
+ PENDING = "Pending"
+ APPROVED = "Approved"
+ REJECTED = "Rejected"
+ DISCONNECTED = "Disconnected"
+ TIMEOUT = "Timeout"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current deployment state of the workspace resource. The provisioningState indicates the
+ state of resource provisioning.
+ """
+
+ UNKNOWN = "Unknown"
+ UPDATING = "Updating"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class ProvisioningStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current deployment state of schedule."""
+
+ COMPLETED = "Completed"
+ PROVISIONING = "Provisioning"
+ FAILED = "Failed"
+
+class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of quota measurement."""
+
+ COUNT = "Count"
+
+class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The reason for the restriction."""
+
+ NOT_SPECIFIED = "NotSpecified"
+ NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion"
+ NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
+
+class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The recurrence frequency."""
+
+ NOT_SPECIFIED = "NotSpecified"
+ SECOND = "Second"
+ MINUTE = "Minute"
+ HOUR = "Hour"
+ DAY = "Day"
+ WEEK = "Week"
+ MONTH = "Month"
+ YEAR = "Year"
+
+class ReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine which reference method to use for an asset."""
+
+ ID = "Id"
+ DATA_PATH = "DataPath"
+ OUTPUT_PATH = "OutputPath"
+
+class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public SSH
+ port is closed on all nodes of the cluster. Enabled - Indicates that the public SSH port is
+ open on all nodes of the cluster. NotSpecified - Indicates that the public SSH port is closed
+ on all nodes of the cluster if VNet is defined, else is open on all public nodes. It can be
+ default only during cluster creation time, after creation it will be either enabled or
+ disabled.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+ NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityAssignment(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Defines values for a ResourceIdentity's type."""
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ NONE = "None"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The identity type."""
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ NONE = "None"
+
+class SamplingAlgorithm(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The sampling algorithm: Grid, Random, or Bayesian."""
+
+ GRID = "Grid"
+ RANDOM = "Random"
+ BAYESIAN = "Bayesian"
+
+class ScaleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Scale type: Auto or Manual."""
+
+ AUTO = "Auto"
+ MANUAL = "Manual"
+
+class ScheduleStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The schedule status."""
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class ScheduleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The schedule type."""
+
+ COMPUTE_START_STOP = "ComputeStartStop"
+
+class SecretsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enum to determine the datastore secrets type."""
+
+ ACCOUNT_KEY = "AccountKey"
+ CERTIFICATE = "Certificate"
+ NONE = "None"
+ SAS = "Sas"
+ SERVICE_PRINCIPAL = "ServicePrincipal"
+ SQL_ADMIN = "SqlAdmin"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public SSH
+ port is closed on this instance. Enabled - Indicates that the public SSH port is open and
+ accessible according to the VNet/subnet policy if applicable.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enable or disable SSL for scoring."""
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+ AUTO = "Auto"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Status of update workspace quota."""
+
+ UNDEFINED = "Undefined"
+ SUCCESS = "Success"
+ FAILURE = "Failure"
+ INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+ INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+ INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+ OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+ OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
+class StatusMessageLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Severity level of a status message."""
+
+ ERROR = "Error"
+ INFORMATION = "Information"
+ WARNING = "Warning"
+
+class TextAnnotationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Annotation type of text data."""
+
+ CLASSIFICATION = "Classification"
+
+class TriggerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The schedule trigger type."""
+
+ RECURRENCE = "Recurrence"
+ CRON = "Cron"
+
+class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Action to take on the underlying resource: Delete or Detach."""
+
+ DELETE = "Delete"
+ DETACH = "Detach"
+
+class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The unit of time measurement for the specified VM price. Example: OneHour."""
+
+ ONE_HOUR = "OneHour"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of usage measurement."""
+
+ COUNT = "Count"
+
+class ValueFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Format for the workspace connection value."""
+
+ JSON = "JSON"
+
+class VmPriceOsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operating system type used by the VM."""
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Virtual Machine priority."""
+
+ DEDICATED = "Dedicated"
+ LOW_PRIORITY = "LowPriority"
+
+class VmTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the VM."""
+
+ STANDARD = "Standard"
+ LOW_PRIORITY = "LowPriority"
+ SPOT = "Spot"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..eedb1b356a6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,10131 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class DatastoreCredentials(msrest.serialization.Model):
+ """Base definition for datastore credentials.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, ServicePrincipalDatastoreCredentials, SqlAdminDatastoreCredentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authenticate with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ """
+
+ _validation = {
+ 'credentials_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator map: the wire value of 'credentialsType' selects the subclass.
+ _subtype_map = {
+ 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials', 'SqlAdmin': 'SqlAdminDatastoreCredentials'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastoreCredentials, self).__init__(**kwargs)
+ # Left as None on the base class; each subclass overwrites it with its discriminator value.
+ self.credentials_type = None # type: Optional[str]
+
+
+class AccountKeyDatastoreCredentials(DatastoreCredentials):
+ """Account key datastore credentials configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authenticate with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ :param secrets: Storage account secrets.
+ :type secrets: ~azure_machine_learning_workspaces.models.AccountKeyDatastoreSecrets
+ """
+
+ _validation = {
+ 'credentials_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AccountKeyDatastoreCredentials, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.credentials_type = 'AccountKey' # type: str
+ self.secrets = kwargs.get('secrets', None)
+
+
+class DatastoreSecrets(msrest.serialization.Model):
+ """Base definition for datastore secrets.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, NoneDatastoreSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets, SqlAdminDatastoreSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authenticate with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ """
+
+ _validation = {
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator map: the wire value of 'secretsType' selects the subclass.
+ _subtype_map = {
+ 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'None': 'NoneDatastoreSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets', 'SqlAdmin': 'SqlAdminDatastoreSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastoreSecrets, self).__init__(**kwargs)
+ # Left as None on the base class; each subclass overwrites it with its discriminator value.
+ self.secrets_type = None # type: Optional[str]
+
+
+class AccountKeyDatastoreSecrets(DatastoreSecrets):
+ """Datastore account key secrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authenticate with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ :param key: Storage account key.
+ :type key: str
+ """
+
+ _validation = {
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AccountKeyDatastoreSecrets, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.secrets_type = 'AccountKey' # type: str
+ self.key = kwargs.get('key', None)
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, SynapseSpark, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ # Polymorphic discriminator map: the wire value of 'computeType' selects the subclass.
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = kwargs.get('compute_location', None)
+ # Readonly (server-populated) attributes below are initialized to None and
+ # are ignored when sending a request.
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Aks, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'AKS' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator map: the wire value of 'computeType' selects the subclass.
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ # Left as None on the base class; each subclass overwrites it with its discriminator value.
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advanced configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ # Pattern validation: service_cidr and docker_bridge_cidr must be IPv4 CIDR notation,
+ # dns_service_ip must be a plain IPv4 address.
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = kwargs.get('subnet_id', None)
+ self.service_cidr = kwargs.get('service_cidr', None)
+ self.dns_service_ip = kwargs.get('dns_service_ip', None)
+ self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None)
+
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param cluster_fqdn: Cluster full qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
+ "DenseProd", "DevTest". Default value: "FastProd".
+ :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
+ "InternalLoadBalancer". Default value: "PublicIp".
+ :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
+ :param load_balancer_subnet: Load Balancer Subnet.
+ :type load_balancer_subnet: str
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
+ 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = kwargs.get('cluster_fqdn', None)
+ # system_services is server-populated (readonly); never set by the client.
+ self.system_services = None
+ self.agent_count = kwargs.get('agent_count', None)
+ self.agent_vm_size = kwargs.get('agent_vm_size', None)
+ self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd")
+ self.ssl_configuration = kwargs.get('ssl_configuration', None)
+ self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None)
+ self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp")
+ self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None)
+
+
+class AmlCompute(Compute):
+ """An Azure Machine Learning compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AML Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(**kwargs)
+ # Fixed discriminator value for this subclass.
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+ :ivar run_id: ID of the Experiment running on the node, if any else null.
+ :vartype run_id: str
+ """
+
+ # Every attribute of this model is server-populated (readonly).
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ # All fields are readonly; initialized to None and filled in by deserialization.
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+ """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlComputeNodesInformation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: maps the wire value of computeType to the subclass.
+ _subtype_map = {
+ 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+ """Compute node information related to an AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ 'nodes': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Network is isolated or not.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+ else is open on all public nodes. It can be default only during cluster creation time, after
+ creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs
+ provisioned. false - Indicates that the compute nodes will have a private endpoint and no
+ public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ # Defaults below mirror the documented default values ("Linux", "NotSpecified", True).
+ self.os_type = kwargs.get('os_type', "Linux")
+ self.vm_size = kwargs.get('vm_size', None)
+ self.vm_priority = kwargs.get('vm_priority', None)
+ self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
+ self.isolated_network = kwargs.get('isolated_network', None)
+ self.scale_settings = kwargs.get('scale_settings', None)
+ self.user_account_credentials = kwargs.get('user_account_credentials', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified")
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True)
+
+
+class IdentityConfiguration(msrest.serialization.Model):
+ """Base definition for identity configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlToken, ManagedIdentity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework. Constant filled by
+ server. Possible values include: "Managed", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: maps the wire value of identityType to the subclass.
+ _subtype_map = {
+ 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IdentityConfiguration, self).__init__(**kwargs)
+ self.identity_type = None # type: Optional[str]
+
+
+class AmlToken(IdentityConfiguration):
+ """AML Token identity configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework. Constant filled by
+ server. Possible values include: "Managed", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlToken, self).__init__(**kwargs)
+ self.identity_type = 'AMLToken' # type: str
+
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ # Serialization map: Python attribute name -> JSON key and msrest type.
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.display_name = kwargs.get('display_name', None)
+ self.description = kwargs.get('description', None)
+
+
+class AssetReferenceBase(msrest.serialization.Model):
+ """Base definition for asset references.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param reference_type: Required. Specifies the type of asset reference. Constant filled by
+ server. Possible values include: "Id", "DataPath", "OutputPath".
+ :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+ """
+
+ _validation = {
+ 'reference_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: maps the wire value of referenceType to the subclass.
+ _subtype_map = {
+ 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssetReferenceBase, self).__init__(**kwargs)
+ self.reference_type = None # type: Optional[str]
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ # Both kwargs are required: a missing one raises KeyError at construction time.
+ self.object_id = kwargs['object_id']
+ self.tenant_id = kwargs['tenant_id']
+
+
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+ :param delay_in_minutes: Delay, in minutes, before auto-pause takes effect.
+ :type delay_in_minutes: int
+ :param enabled: Whether auto-pause is enabled.
+ :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = kwargs.get('delay_in_minutes', None)
+ self.enabled = kwargs.get('enabled', None)
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+ :param min_node_count: Minimum number of nodes when auto-scaling.
+ :type min_node_count: int
+ :param enabled: Whether auto-scale is enabled.
+ :type enabled: bool
+ :param max_node_count: Maximum number of nodes when auto-scaling.
+ :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = kwargs.get('min_node_count', None)
+ self.enabled = kwargs.get('enabled', None)
+ self.max_node_count = kwargs.get('max_node_count', None)
+
+
+class OnlineScaleSettings(msrest.serialization.Model):
+ """Online deployment scaling configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AutoScaleSettings, ManualScaleSettings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_instances: Maximum number of instances for this deployment.
+ :type max_instances: int
+ :param min_instances: Minimum number of instances for this deployment.
+ :type min_instances: int
+ :param scale_type: Required. Type of deployment scaling algorithm. Constant filled by server.
+ Possible values include: "Auto", "Manual".
+ :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+ """
+
+ _validation = {
+ 'scale_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_instances': {'key': 'maxInstances', 'type': 'int'},
+ 'min_instances': {'key': 'minInstances', 'type': 'int'},
+ 'scale_type': {'key': 'scaleType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: maps the wire value of scaleType to the subclass.
+ _subtype_map = {
+ 'scale_type': {'Auto': 'AutoScaleSettings', 'Manual': 'ManualScaleSettings'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OnlineScaleSettings, self).__init__(**kwargs)
+ self.max_instances = kwargs.get('max_instances', None)
+ self.min_instances = kwargs.get('min_instances', None)
+ self.scale_type = None # type: Optional[str]
+
+
+class AutoScaleSettings(OnlineScaleSettings):
+ """AutoScaleSettings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_instances: Maximum number of instances for this deployment.
+ :type max_instances: int
+ :param min_instances: Minimum number of instances for this deployment.
+ :type min_instances: int
+ :param scale_type: Required. Type of deployment scaling algorithm. Constant filled by server.
+ Possible values include: "Auto", "Manual".
+ :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+ :param polling_interval: The polling interval in ISO 8601 format. Only supports duration with
+ precision as low as Seconds.
+ :type polling_interval: ~datetime.timedelta
+ :param target_utilization_percentage: Target CPU usage for the autoscaler.
+ :type target_utilization_percentage: int
+ """
+
+ _validation = {
+ 'scale_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_instances': {'key': 'maxInstances', 'type': 'int'},
+ 'min_instances': {'key': 'minInstances', 'type': 'int'},
+ 'scale_type': {'key': 'scaleType', 'type': 'str'},
+ 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'},
+ 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaleSettings, self).__init__(**kwargs)
+ self.scale_type = 'Auto' # type: str
+ self.polling_interval = kwargs.get('polling_interval', None)
+ self.target_utilization_percentage = kwargs.get('target_utilization_percentage', None)
+
+
+class DatastoreContents(msrest.serialization.Model):
+ """Base definition for datastore contents configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AzureBlobContents, AzureDataLakeGen1Contents, AzureDataLakeGen2Contents, AzureFileContents, AzurePostgreSqlContents, AzureSqlDatabaseContents, GlusterFsContents.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ }
+
+ # Polymorphic discriminator: maps the wire value of contentsType to the subclass.
+ _subtype_map = {
+ 'contents_type': {'AzureBlob': 'AzureBlobContents', 'AzureDataLakeGen1': 'AzureDataLakeGen1Contents', 'AzureDataLakeGen2': 'AzureDataLakeGen2Contents', 'AzureFile': 'AzureFileContents', 'AzurePostgreSql': 'AzurePostgreSqlContents', 'AzureSqlDatabase': 'AzureSqlDatabaseContents', 'GlusterFs': 'GlusterFsContents'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastoreContents, self).__init__(**kwargs)
+ self.contents_type = None # type: Optional[str]
+
+
+class AzureBlobContents(DatastoreContents):
+ """Azure Blob datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzureBlobContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureBlob' # type: str
+ # Required kwargs: a missing one raises KeyError at construction time.
+ self.account_name = kwargs['account_name']
+ self.container_name = kwargs['container_name']
+ self.credentials = kwargs['credentials']
+ self.endpoint = kwargs['endpoint']
+ self.protocol = kwargs['protocol']
+
+
+class AzureDataLakeGen1Contents(DatastoreContents):
+ """Azure Data Lake Gen1 datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param store_name: Required. Azure Data Lake store name.
+ :type store_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'store_name': {'key': 'storeName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzureDataLakeGen1Contents, self).__init__(**kwargs)
+ self.contents_type = 'AzureDataLakeGen1' # type: str
+ # Required kwargs: a missing one raises KeyError at construction time.
+ self.credentials = kwargs['credentials']
+ self.store_name = kwargs['store_name']
+
+
+class AzureDataLakeGen2Contents(DatastoreContents):
+ """Azure Data Lake Gen2 datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzureDataLakeGen2Contents, self).__init__(**kwargs)
+ self.contents_type = 'AzureDataLakeGen2' # type: str
+ # Required kwargs: a missing one raises KeyError at construction time.
+ self.account_name = kwargs['account_name']
+ self.container_name = kwargs['container_name']
+ self.credentials = kwargs['credentials']
+ self.endpoint = kwargs['endpoint']
+ self.protocol = kwargs['protocol']
+
+
+class AzureFileContents(DatastoreContents):
+ """Azure File datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzureFileContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureFile' # type: str
+ # Required kwargs: a missing one raises KeyError at construction time.
+ self.account_name = kwargs['account_name']
+ self.container_name = kwargs['container_name']
+ self.credentials = kwargs['credentials']
+ self.endpoint = kwargs['endpoint']
+ self.protocol = kwargs['protocol']
+
+
+class AzurePostgreSqlContents(DatastoreContents):
+ """Azure PostgreSQL datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param database_name: Required. Azure PostgreSQL database name.
+ :type database_name: str
+ :param enable_ssl: Whether the Azure PostgreSQL server requires SSL.
+ :type enable_ssl: bool
+ :param endpoint: Required. Azure cloud endpoint for the database.
+ :type endpoint: str
+ :param port_number: Required. Azure PostgreSQL server port.
+ :type port_number: int
+ :param server_name: Required. Azure PostgreSQL server name.
+ :type server_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port_number': {'required': True},
+ 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'database_name': {'key': 'databaseName', 'type': 'str'},
+ 'enable_ssl': {'key': 'enableSSL', 'type': 'bool'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'port_number': {'key': 'portNumber', 'type': 'int'},
+ 'server_name': {'key': 'serverName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzurePostgreSqlContents, self).__init__(**kwargs)
+ self.contents_type = 'AzurePostgreSql' # type: str
+ # Required kwargs: a missing one raises KeyError; enable_ssl is optional.
+ self.credentials = kwargs['credentials']
+ self.database_name = kwargs['database_name']
+ self.enable_ssl = kwargs.get('enable_ssl', None)
+ self.endpoint = kwargs['endpoint']
+ self.port_number = kwargs['port_number']
+ self.server_name = kwargs['server_name']
+
+
+class AzureSqlDatabaseContents(DatastoreContents):
+ """Azure SQL Database datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore. Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param database_name: Required. Azure SQL database name.
+ :type database_name: str
+ :param endpoint: Required. Azure cloud endpoint for the database.
+ :type endpoint: str
+ :param port_number: Required. Azure SQL server port.
+ :type port_number: int
+ :param server_name: Required. Azure SQL server name.
+ :type server_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port_number': {'required': True},
+ 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'database_name': {'key': 'databaseName', 'type': 'str'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'port_number': {'key': 'portNumber', 'type': 'int'},
+ 'server_name': {'key': 'serverName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AzureSqlDatabaseContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureSqlDatabase' # type: str
+ # Required kwargs: a missing one raises KeyError at construction time.
+ self.credentials = kwargs['credentials']
+ self.database_name = kwargs['database_name']
+ self.endpoint = kwargs['endpoint']
+ self.port_number = kwargs['port_number']
+ self.server_name = kwargs['server_name']
+
+
class EarlyTerminationPolicy(msrest.serialization.Model):
    """Base class for early termination policies, which cancel poor-performing runs before they complete.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy.

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Name of policy configuration. Constant filled by server. Possible
     values include: "Bandit", "MedianStopping", "TruncationSelection".
    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
    }

    # Maps the 'policy_type' discriminator value to the concrete subclass.
    _subtype_map = {
        'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'}
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.delay_evaluation = kwargs.get('delay_evaluation')
        self.evaluation_interval = kwargs.get('evaluation_interval')
        # Discriminator; concrete subclasses overwrite this with their constant.
        self.policy_type = None  # type: Optional[str]
+
+
class BanditPolicy(EarlyTerminationPolicy):
    """Early termination policy based on slack criteria plus a frequency and delay interval for evaluation.

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Name of policy configuration. Constant filled by server. Possible
     values include: "Bandit", "MedianStopping", "TruncationSelection".
    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
    :param slack_amount: Absolute distance allowed from the best performing run.
    :type slack_amount: float
    :param slack_factor: Ratio of the allowed distance from the best performing run.
    :type slack_factor: float
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
        'slack_amount': {'key': 'slackAmount', 'type': 'float'},
        'slack_factor': {'key': 'slackFactor', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value is fixed for this subtype.
        self.policy_type = 'Bandit'  # type: str
        self.slack_amount = kwargs.get('slack_amount')
        self.slack_factor = kwargs.get('slack_factor')
+
+
class BatchDeployment(msrest.serialization.Model):
    """Per-deployment settings for batch inference.

    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
    :param compute: Configuration for compute binding.
    :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
    :param description: Description of the endpoint deployment.
    :type description: str
    :param environment_id: ARM resource ID of the environment specification for the endpoint
     deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the deployment.
    :type environment_variables: dict[str, str]
    :param error_threshold: Error threshold, if the error count for the entire input goes above
     this value, the batch inference will be aborted. Range is [-1, int.MaxValue].
     For FileDataset, this value is the count of file failures.
     For TabularDataset, this value is the count of record failures.
     If set to -1 (the lower bound), all failures during batch inference will be ignored.
    :type error_threshold: int
    :param logging_level: Logging level for batch inference operation. Possible values include:
     "Info", "Warning", "Debug".
    :type logging_level: str or ~azure_machine_learning_workspaces.models.BatchLoggingLevel
    :param mini_batch_size: Size of the mini-batch passed to each batch invocation.
     For FileDataset, this is the number of files per mini-batch.
     For TabularDataset, this is the size of the records in bytes, per mini-batch.
    :type mini_batch_size: long
    :param model: Reference to the model asset for the endpoint deployment.
    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
    :param output_configuration: Output configuration for the batch inference operation.
    :type output_configuration: ~azure_machine_learning_workspaces.models.BatchOutputConfiguration
    :param partition_keys: Partition keys list used for Named partitioning.
    :type partition_keys: list[str]
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :param retry_settings: Retry Settings for the batch inference operation.
    :type retry_settings: ~azure_machine_learning_workspaces.models.BatchRetrySettings
    """

    _attribute_map = {
        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'error_threshold': {'key': 'errorThreshold', 'type': 'int'},
        'logging_level': {'key': 'loggingLevel', 'type': 'str'},
        'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'},
        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
        'output_configuration': {'key': 'outputConfiguration', 'type': 'BatchOutputConfiguration'},
        'partition_keys': {'key': 'partitionKeys', 'type': '[str]'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Every field is optional; any value not supplied defaults to None.
        for field in (
            'code_configuration', 'compute', 'description', 'environment_id',
            'environment_variables', 'error_threshold', 'logging_level',
            'mini_batch_size', 'model', 'output_configuration',
            'partition_keys', 'properties', 'retry_settings',
        ):
            setattr(self, field, kwargs.get(field))
+
+
class Resource(msrest.serialization.Model):
    """Common fields returned in the response for every Azure Resource Manager resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All three fields are server-populated (readonly), so they start unset.
        self.id = None
        self.name = None
        self.type = None
+
+
class TrackedResource(Resource):
    """ARM tracked top-level resource: a Resource that additionally carries 'tags' and a required 'location'.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    :param location: Required. The geo-location where the resource lives.
    :type location: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.tags = kwargs.get('tags')
        # Required: raises KeyError when 'location' is missing from kwargs.
        self.location = kwargs['location']
+
+
class BatchDeploymentTrackedResource(TrackedResource):
    """ARM tracked-resource envelope around a BatchDeployment.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    :param location: Required. The geo-location where the resource lives.
    :type location: str
    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
     resources of the same type.
    :type kind: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.BatchDeployment
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'BatchDeployment'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        # Required: raises KeyError when 'properties' is missing from kwargs.
        self.properties = kwargs['properties']
        # Server-populated (readonly).
        self.system_data = None
+
+
class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of BatchDeployment entities.

    :param next_link: The link to the next page of BatchDeployment objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type BatchDeployment.
    :type value: list[~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[BatchDeploymentTrackedResource]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class BatchEndpoint(msrest.serialization.Model):
    """Configuration for a batch inference endpoint.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param auth_mode: Enum to determine endpoint authentication mode. Possible values include:
     "AMLToken", "Key", "AADToken".
    :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthMode
    :param description: Description of the inference endpoint.
    :type description: str
    :param keys: EndpointAuthKeys to set initially on an Endpoint.
     This property will always be returned as null. AuthKey values must be retrieved using the
     ListKeys API.
    :type keys: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :ivar scoring_uri: Endpoint URI.
    :vartype scoring_uri: str
    :ivar swagger_uri: Endpoint Swagger URI.
    :vartype swagger_uri: str
    :param traffic: Traffic rules on how the traffic will be routed across deployments.
    :type traffic: dict[str, int]
    """

    _validation = {
        'scoring_uri': {'readonly': True},
        'swagger_uri': {'readonly': True},
    }

    _attribute_map = {
        'auth_mode': {'key': 'authMode', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
        'traffic': {'key': 'traffic', 'type': '{int}'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.auth_mode = kwargs.get('auth_mode')
        self.description = kwargs.get('description')
        self.keys = kwargs.get('keys')
        self.properties = kwargs.get('properties')
        # Server-populated (readonly) URIs.
        self.scoring_uri = None
        self.swagger_uri = None
        self.traffic = kwargs.get('traffic')
+
+
class BatchEndpointTrackedResource(TrackedResource):
    """ARM tracked-resource envelope around a BatchEndpoint.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    :param location: Required. The geo-location where the resource lives.
    :type location: str
    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
     resources of the same type.
    :type kind: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.BatchEndpoint
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'BatchEndpoint'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        # Required: raises KeyError when 'properties' is missing from kwargs.
        self.properties = kwargs['properties']
        # Server-populated (readonly).
        self.system_data = None
+
+
class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of BatchEndpoint entities.

    :param next_link: The link to the next page of BatchEndpoint objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type BatchEndpoint.
    :type value: list[~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[BatchEndpointTrackedResource]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class BatchOutputConfiguration(msrest.serialization.Model):
    """Output configuration for a batch inference operation.

    :param append_row_file_name: Customized output file name for append_row output action.
    :type append_row_file_name: str
    :param output_action: Indicates how the output will be organized. Possible values include:
     "SummaryOnly", "AppendRow".
    :type output_action: str or ~azure_machine_learning_workspaces.models.BatchOutputAction
    """

    _attribute_map = {
        'append_row_file_name': {'key': 'appendRowFileName', 'type': 'str'},
        'output_action': {'key': 'outputAction', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.append_row_file_name = kwargs.get('append_row_file_name')
        self.output_action = kwargs.get('output_action')
+
+
class BatchRetrySettings(msrest.serialization.Model):
    """Retry configuration for a batch inference operation.

    :param max_retries: Maximum retry count for a mini-batch.
    :type max_retries: int
    :param timeout: Invocation timeout for a mini-batch, in ISO 8601 format.
    :type timeout: ~datetime.timedelta
    """

    _attribute_map = {
        'max_retries': {'key': 'maxRetries', 'type': 'int'},
        'timeout': {'key': 'timeout', 'type': 'duration'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.max_retries = kwargs.get('max_retries')
        self.timeout = kwargs.get('timeout')
+
+
class CertificateDatastoreCredentials(DatastoreCredentials):
    """Certificate-based datastore credentials configuration.

    All required parameters must be populated in order to send to Azure.

    :param credentials_type: Required. Credential type used to authentication with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
    :param authority_url: Authority URL used for authentication.
    :type authority_url: str
    :param client_id: Required. Service principal client ID.
    :type client_id: str
    :param resource_uri: Resource the service principal has access to.
    :type resource_uri: str
    :param secrets: Service principal secrets.
    :type secrets: ~azure_machine_learning_workspaces.models.CertificateDatastoreSecrets
    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
    :type tenant_id: str
    :param thumbprint: Required. Thumbprint of the certificate used for authentication.
    :type thumbprint: str
    """

    # NOTE(review): the thumbprint pattern is unanchored -- any value containing a
    # word character passes; confirm against the REST spec.
    _validation = {
        'credentials_type': {'required': True},
        'client_id': {'required': True},
        'tenant_id': {'required': True},
        'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value is fixed for this subtype.
        self.credentials_type = 'Certificate'  # type: str
        self.authority_url = kwargs.get('authority_url')
        # Required keys raise KeyError when missing from kwargs.
        self.client_id = kwargs['client_id']
        self.resource_uri = kwargs.get('resource_uri')
        self.secrets = kwargs.get('secrets')
        self.tenant_id = kwargs['tenant_id']
        self.thumbprint = kwargs['thumbprint']
+
+
class CertificateDatastoreSecrets(DatastoreSecrets):
    """Certificate secrets for a datastore.

    All required parameters must be populated in order to send to Azure.

    :param secrets_type: Required. Credential type used to authentication with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
    :param certificate: Service principal certificate.
    :type certificate: str
    """

    _validation = {
        'secrets_type': {'required': True},
    }

    _attribute_map = {
        'secrets_type': {'key': 'secretsType', 'type': 'str'},
        'certificate': {'key': 'certificate', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value is fixed for this subtype.
        self.secrets_type = 'Certificate'  # type: str
        self.certificate = kwargs.get('certificate')
+
+
class ClusterUpdateParameters(msrest.serialization.Model):
    """Parameters for updating an AmlCompute cluster.

    :param scale_settings: Desired scale settings for the amlCompute.
    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
    """

    _attribute_map = {
        # Flattened on the wire under the 'properties' envelope.
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.scale_settings = kwargs.get('scale_settings')
+
+
class ExportSummary(msrest.serialization.Model):
    """Base class for label-export summaries.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :param format: Required. The format of exported labels, also as the discriminator.
     Constant filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    """

    _validation = {
        'end_time_utc': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'start_time_utc': {'readonly': True},
    }

    _attribute_map = {
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
    }

    # Maps the 'format' discriminator value to the concrete subclass.
    _subtype_map = {
        'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'}
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Readonly fields are server-populated; start unset.
        self.end_time_utc = None
        self.exported_row_count = None
        # Discriminator; concrete subclasses overwrite this with their constant.
        self.format = None  # type: Optional[str]
        self.labeling_job_id = None
        self.start_time_utc = None
+
+
class CocoExportSummary(ExportSummary):
    """Export summary for labels exported in Coco format.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :param format: Required. The format of exported labels, also as the discriminator.
     Constant filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar container_name: The container name to which the labels will be exported.
    :vartype container_name: str
    :ivar snapshot_path: The output path where the labels will be exported.
    :vartype snapshot_path: str
    """

    _validation = {
        'end_time_utc': {'readonly': True},
        'exported_row_count': {'readonly': True},
        'format': {'required': True},
        'labeling_job_id': {'readonly': True},
        'start_time_utc': {'readonly': True},
        'container_name': {'readonly': True},
        'snapshot_path': {'readonly': True},
    }

    _attribute_map = {
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
        'format': {'key': 'format', 'type': 'str'},
        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator value is fixed for this subtype.
        self.format = 'Coco'  # type: str
        # Server-populated (readonly).
        self.container_name = None
        self.snapshot_path = None
+
+
class CodeConfiguration(msrest.serialization.Model):
    """Configuration for a scoring code asset.

    All required parameters must be populated in order to send to Azure.

    :param code_id: ARM resource ID of the code asset.
    :type code_id: str
    :param scoring_script: Required. The script to execute on startup. eg. "score.py".
    :type scoring_script: str
    """

    # NOTE(review): the pattern is unanchored -- any non-empty value containing a
    # word character passes; confirm against the REST spec.
    _validation = {
        'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'code_id': {'key': 'codeId', 'type': 'str'},
        'scoring_script': {'key': 'scoringScript', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.code_id = kwargs.get('code_id')
        # Required: raises KeyError when 'scoring_script' is missing from kwargs.
        self.scoring_script = kwargs['scoring_script']
+
+
class CodeContainer(msrest.serialization.Model):
    """Container that groups versions of a code asset.

    :param description: The asset description text.
    :type description: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class CodeContainerResource(Resource):
    """Azure Resource Manager resource envelope for a CodeContainer.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.CodeContainer
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'CodeContainer'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Required: raises KeyError when 'properties' is missing from kwargs.
        self.properties = kwargs['properties']
        # Server-populated (readonly).
        self.system_data = None
+
+
class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of CodeContainer entities.

    :param next_link: The link to the next page of CodeContainer objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type CodeContainer.
    :type value: list[~azure_machine_learning_workspaces.models.CodeContainerResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[CodeContainerResource]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class CodeVersion(msrest.serialization.Model):
    """Details of one version of a code asset.

    All required parameters must be populated in order to send to Azure.

    :param datastore_id: ARM resource ID of the datastore where the asset is located.
    :type datastore_id: str
    :param description: The asset description text.
    :type description: str
    :param is_anonymous: If the name version are system generated (anonymous registration).
    :type is_anonymous: bool
    :param path: Required. The path of the file/directory in the datastore.
    :type path: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    # NOTE(review): the pattern is unanchored -- any value containing a word
    # character passes; confirm against the REST spec.
    _validation = {
        'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'path': {'key': 'path', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.datastore_id = kwargs.get('datastore_id')
        self.description = kwargs.get('description')
        self.is_anonymous = kwargs.get('is_anonymous')
        # Required: raises KeyError when 'path' is missing from kwargs.
        self.path = kwargs['path']
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
+class CodeVersionResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.CodeVersion
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'readonly' fields are server-populated; only 'properties' is caller-supplied.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'CodeVersion'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CodeVersionResource, self).__init__(**kwargs)
+        # Required: raises KeyError if 'properties' is not passed.
+        self.properties = kwargs['properties']
+        # Read-only; filled in by the service on responses.
+        self.system_data = None
+
+
+class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of CodeVersion entities.
+
+    :param next_link: The link to the next page of CodeVersion objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type CodeVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.CodeVersionResource]
+    """
+
+    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[CodeVersionResource]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional; absent kwargs default to None.
+        super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = kwargs.get('next_link', None)
+        self.value = kwargs.get('value', None)
+
+
+class JobBase(msrest.serialization.Model):
+    """Base definition for a job.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: CommandJob, SweepJob.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The asset description text.
+    :type description: str
+    :ivar interaction_endpoints: List of JobEndpoints.
+     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+    :vartype interaction_endpoints: dict[str,
+     ~azure_machine_learning_workspaces.models.JobEndpoint]
+    :param job_type: Required. Specifies the type of job. Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
+     "Succeeded", "Failed", "Canceled", "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    _validation = {
+        'interaction_endpoints': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    # Polymorphic dispatch: the 'job_type' discriminator value selects which
+    # subclass msrest instantiates on deserialization.
+    _subtype_map = {
+        'job_type': {'Command': 'CommandJob', 'Sweep': 'SweepJob'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobBase, self).__init__(**kwargs)
+        self.description = kwargs.get('description', None)
+        # Read-only fields are server-populated; None on client-constructed objects.
+        self.interaction_endpoints = None
+        # Discriminator placeholder; each concrete subclass overwrites this.
+        self.job_type = None  # type: Optional[str]
+        self.properties = kwargs.get('properties', None)
+        self.provisioning_state = None
+        self.tags = kwargs.get('tags', None)
+
+
+class CommandJob(JobBase):
+    """Command job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The asset description text.
+    :type description: str
+    :ivar interaction_endpoints: List of JobEndpoints.
+     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+    :vartype interaction_endpoints: dict[str,
+     ~azure_machine_learning_workspaces.models.JobEndpoint]
+    :param job_type: Required. Specifies the type of job. Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
+     "Succeeded", "Failed", "Canceled", "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param code_id: ARM resource ID of the code asset.
+    :type code_id: str
+    :param command: Required. The command to execute on startup of the job. eg. "python train.py".
+    :type command: str
+    :param compute: Required. Compute binding for the job.
+    :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+    :param distribution: Distribution configuration of the job. If set, this should be one of Mpi,
+     Tensorflow, PyTorch, or null.
+    :type distribution: ~azure_machine_learning_workspaces.models.DistributionConfiguration
+    :param environment_id: The ARM resource ID of the Environment specification for the job.
+    :type environment_id: str
+    :param environment_variables: Environment variables included in the job.
+    :type environment_variables: dict[str, str]
+    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+     placed in the "Default" experiment.
+    :type experiment_name: str
+    :param identity: Identity configuration. If set, this should be one of AmlToken,
+     ManagedIdentity, or null.
+     Defaults to AmlToken if null.
+    :type identity: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+    :param input_data_bindings: Mapping of input data bindings used in the job.
+    :type input_data_bindings: dict[str,
+     ~azure_machine_learning_workspaces.models.InputDataBinding]
+    :ivar output: Location of the job output logs and artifacts.
+    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+    :param output_data_bindings: Mapping of output data bindings used in the job.
+    :type output_data_bindings: dict[str,
+     ~azure_machine_learning_workspaces.models.OutputDataBinding]
+    :ivar parameters: Input parameters.
+    :vartype parameters: dict[str, object]
+    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+     Private preview feature and only available to users on the allow list.
+    :type priority: int
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :param timeout: The max run duration in ISO 8601 format, after which the job will be cancelled.
+     Only supports duration with precision as low as Seconds.
+    :type timeout: ~datetime.timedelta
+    """
+
+    # 'command' and 'compute' are mandatory; the rest of the rules mark
+    # server-populated (readonly) fields.
+    _validation = {
+        'interaction_endpoints': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+        'compute': {'required': True},
+        'output': {'readonly': True},
+        'parameters': {'readonly': True},
+        'status': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'code_id': {'key': 'codeId', 'type': 'str'},
+        'command': {'key': 'command', 'type': 'str'},
+        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
+        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
+        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
+        'output': {'key': 'output', 'type': 'JobOutput'},
+        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'priority': {'key': 'priority', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'timeout': {'key': 'timeout', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CommandJob, self).__init__(**kwargs)
+        # Fixed discriminator value for this JobBase subclass.
+        self.job_type = 'Command'  # type: str
+        self.code_id = kwargs.get('code_id', None)
+        # Required kwargs: missing 'command' or 'compute' raises KeyError here.
+        self.command = kwargs['command']
+        self.compute = kwargs['compute']
+        self.distribution = kwargs.get('distribution', None)
+        self.environment_id = kwargs.get('environment_id', None)
+        self.environment_variables = kwargs.get('environment_variables', None)
+        self.experiment_name = kwargs.get('experiment_name', None)
+        self.identity = kwargs.get('identity', None)
+        self.input_data_bindings = kwargs.get('input_data_bindings', None)
+        # Read-only fields; populated by the service on responses.
+        self.output = None
+        self.output_data_bindings = kwargs.get('output_data_bindings', None)
+        self.parameters = None
+        self.priority = kwargs.get('priority', None)
+        self.status = None
+        self.timeout = kwargs.get('timeout', None)
+
+
+class Components1D3SwueSchemasComputeresourceAllof1(msrest.serialization.Model):
+    """Components1D3SwueSchemasComputeresourceAllof1.
+
+    Mixin carrying the ``properties`` half of the ComputeResource allOf schema;
+    combined with Resource by ComputeResource.
+
+    :param properties: Compute properties.
+    :type properties: ~azure_machine_learning_workspaces.models.Compute
+    """
+
+    _attribute_map = {
+        'properties': {'key': 'properties', 'type': 'Compute'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Components1D3SwueSchemasComputeresourceAllof1, self).__init__(**kwargs)
+        self.properties = kwargs.get('properties', None)
+
+
+class ComputeConfiguration(msrest.serialization.Model):
+    """Configuration for compute binding.
+
+    :param instance_count: Number of instances or nodes.
+    :type instance_count: int
+    :param instance_type: SKU type to run on.
+    :type instance_type: str
+    :param is_local: Set to true for jobs running on local compute.
+    :type is_local: bool
+    :param location: Location for virtual cluster run.
+    :type location: str
+    :param properties: Additional properties.
+    :type properties: dict[str, str]
+    :param target: ARM resource ID of the compute resource.
+    :type target: str
+    """
+
+    _attribute_map = {
+        'instance_count': {'key': 'instanceCount', 'type': 'int'},
+        'instance_type': {'key': 'instanceType', 'type': 'str'},
+        'is_local': {'key': 'isLocal', 'type': 'bool'},
+        'location': {'key': 'location', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'target': {'key': 'target', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional; absent kwargs default to None.
+        super(ComputeConfiguration, self).__init__(**kwargs)
+        self.instance_count = kwargs.get('instance_count', None)
+        self.instance_type = kwargs.get('instance_type', None)
+        self.is_local = kwargs.get('is_local', None)
+        self.location = kwargs.get('location', None)
+        self.properties = kwargs.get('properties', None)
+        self.target = kwargs.get('target', None)
+
+
+class ComputeInstance(Compute):
+    """An Azure Machine Learning compute instance.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics", "SynapseSpark".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The time at which the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The time at which the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+     MSI and AAD exclusively for authentication.
+    :type disable_local_auth: bool
+    :param properties: Compute Instance properties.
+    :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Shared Compute fields are handled by the base class; this subclass only
+        # fixes the discriminator and captures its own 'properties' payload.
+        super(ComputeInstance, self).__init__(**kwargs)
+        self.compute_type = 'ComputeInstance'  # type: str
+        self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+    """Defines an Aml Instance application and its connectivity endpoint URI.
+
+    :param display_name: Name of the ComputeInstance application.
+    :type display_name: str
+    :param endpoint_uri: Application's endpoint URI.
+    :type endpoint_uri: str
+    """
+
+    _attribute_map = {
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Both fields are optional; absent kwargs default to None.
+        super(ComputeInstanceApplication, self).__init__(**kwargs)
+        self.display_name = kwargs.get('display_name', None)
+        self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+    """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar public_ip_address: Public IP Address of this ComputeInstance.
+    :vartype public_ip_address: str
+    :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+     which the compute instance is deployed).
+    :vartype private_ip_address: str
+    """
+
+    # Every field is server-populated (readonly); the constructor takes no data.
+    _validation = {
+        'public_ip_address': {'readonly': True},
+        'private_ip_address': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+        'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+        self.public_ip_address = None
+        self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+    """Describes information on user who created this ComputeInstance.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar user_name: Name of the user.
+    :vartype user_name: str
+    :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+    :vartype user_org_id: str
+    :ivar user_id: Uniquely identifies the user within his/her organization.
+    :vartype user_id: str
+    """
+
+    # Every field is server-populated (readonly); the constructor takes no data.
+    _validation = {
+        'user_name': {'readonly': True},
+        'user_org_id': {'readonly': True},
+        'user_id': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'user_name': {'key': 'userName', 'type': 'str'},
+        'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+        'user_id': {'key': 'userId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+        self.user_name = None
+        self.user_org_id = None
+        self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+    """The last operation on ComputeInstance.
+
+    :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+     "Stop", "Restart", "Reimage", "Delete".
+    :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+    :param operation_time: Time of the last operation.
+    :type operation_time: ~datetime.datetime
+    :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+     "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+    :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+    """
+
+    _attribute_map = {
+        'operation_name': {'key': 'operationName', 'type': 'str'},
+        'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+        'operation_status': {'key': 'operationStatus', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # All fields are optional; absent kwargs default to None.
+        super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+        self.operation_name = kwargs.get('operation_name', None)
+        self.operation_time = kwargs.get('operation_time', None)
+        self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+    """Compute Instance properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param vm_size: Virtual Machine Size.
+    :type vm_size: str
+    :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+    :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+    :param application_sharing_policy: Policy for sharing applications on this compute instance
+     among users of parent workspace. If Personal, only the creator can access applications on this
+     compute instance. When Shared, any workspace user can access applications on this instance
+     depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+     value: "Shared".
+    :type application_sharing_policy: str or
+     ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+    :param ssh_settings: Specifies policy and settings for SSH access.
+    :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+    :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+     ComputeInstance.
+    :vartype connectivity_endpoints:
+     ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+    :ivar applications: Describes available applications and their endpoints on this
+     ComputeInstance.
+    :vartype applications:
+     list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+    :ivar created_by: Describes information on user who created this ComputeInstance.
+    :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+    :ivar errors: Collection of errors encountered on this ComputeInstance.
+    :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+     "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+     "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+    :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+    :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+     values are personal (default). Possible values include: "personal". Default value: "personal".
+    :type compute_instance_authorization_type: str or
+     ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+    :param personal_compute_instance_settings: Settings for a personal compute instance.
+    :type personal_compute_instance_settings:
+     ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+    :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+    :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+    :ivar last_operation: The last operation on ComputeInstance.
+    :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+    :param schedules: The list of schedules to be applied on the compute instance.
+    :type schedules: ~azure_machine_learning_workspaces.models.ComputeSchedules
+    """
+
+    # 'readonly' fields are server-populated and never sent in requests.
+    _validation = {
+        'connectivity_endpoints': {'readonly': True},
+        'applications': {'readonly': True},
+        'created_by': {'readonly': True},
+        'errors': {'readonly': True},
+        'state': {'readonly': True},
+        'last_operation': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'vm_size': {'key': 'vmSize', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+        'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+        'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+        'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+        'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+        'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+        'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+        'state': {'key': 'state', 'type': 'str'},
+        'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+        'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+        'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+        'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+        'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeInstanceProperties, self).__init__(**kwargs)
+        self.vm_size = kwargs.get('vm_size', None)
+        self.subnet = kwargs.get('subnet', None)
+        # Defaults to "Shared" when not supplied (per the swagger default).
+        self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
+        self.ssh_settings = kwargs.get('ssh_settings', None)
+        # Read-only fields; populated by the service on responses.
+        self.connectivity_endpoints = None
+        self.applications = None
+        self.created_by = None
+        self.errors = None
+        self.state = None
+        # Defaults to "personal" when not supplied (per the swagger default).
+        self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
+        self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
+        self.setup_scripts = kwargs.get('setup_scripts', None)
+        self.last_operation = None
+        self.schedules = kwargs.get('schedules', None)
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+    """Specifies policy and settings for SSH access.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+     Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+     public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+     Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+    :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+    :ivar admin_user_name: Describes the admin user name.
+    :vartype admin_user_name: str
+    :ivar ssh_port: Describes the port for connecting through SSH.
+    :vartype ssh_port: int
+    :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+     rsa -b 2048" to generate your SSH key pairs.
+    :type admin_public_key: str
+    """
+
+    _validation = {
+        'admin_user_name': {'readonly': True},
+        'ssh_port': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+        'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+        'ssh_port': {'key': 'sshPort', 'type': 'int'},
+        'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+        # Defaults to "Disabled" when not supplied (per the swagger default).
+        self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled")
+        # Read-only fields; populated by the service on responses.
+        self.admin_user_name = None
+        self.ssh_port = None
+        self.admin_public_key = kwargs.get('admin_public_key', None)
+
+
+class ComputeResource(Resource, Components1D3SwueSchemasComputeresourceAllof1):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.id = None
+ self.name = None
+ self.type = None
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+
+
+class ComputeSchedules(msrest.serialization.Model):
+    """The list of schedules to be applied on the computes.
+
+    :param compute_start_stop: The list of compute start stop schedules to be applied.
+    :type compute_start_stop:
+     list[~azure_machine_learning_workspaces.models.ComputeStartStopSchedule]
+    """
+
+    _attribute_map = {
+        'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Single optional field; absent kwarg defaults to None.
+        super(ComputeSchedules, self).__init__(**kwargs)
+        self.compute_start_stop = kwargs.get('compute_start_stop', None)
+
+
+class ComputeStartStopSchedule(msrest.serialization.Model):
+    """Compute start stop schedule properties.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Schedule id.
+    :vartype id: str
+    :ivar provisioning_status: The current deployment state of schedule. Possible values include:
+     "Completed", "Provisioning", "Failed".
+    :vartype provisioning_status: str or
+     ~azure_machine_learning_workspaces.models.ProvisioningStatus
+    :param status: The schedule status. Possible values include: "Enabled", "Disabled".
+    :type status: str or ~azure_machine_learning_workspaces.models.ScheduleStatus
+    :param trigger_type: The schedule trigger type. Possible values include: "Recurrence", "Cron".
+    :type trigger_type: str or ~azure_machine_learning_workspaces.models.TriggerType
+    :param action: The compute power action. Possible values include: "Start", "Stop".
+    :type action: str or ~azure_machine_learning_workspaces.models.ComputePowerAction
+    :param recurrence: The workflow trigger recurrence for ComputeStartStop schedule type.
+    :type recurrence: ~azure_machine_learning_workspaces.models.Recurrence
+    :param cron: The workflow trigger cron for ComputeStartStop schedule type.
+    :type cron: ~azure_machine_learning_workspaces.models.Cron
+    """
+
+    # 'id' and 'provisioning_status' are server-populated (readonly).
+    _validation = {
+        'id': {'readonly': True},
+        'provisioning_status': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'trigger_type': {'key': 'triggerType', 'type': 'str'},
+        'action': {'key': 'action', 'type': 'str'},
+        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
+        'cron': {'key': 'cron', 'type': 'Cron'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ComputeStartStopSchedule, self).__init__(**kwargs)
+        # Read-only fields; populated by the service on responses.
+        self.id = None
+        self.provisioning_status = None
+        self.status = kwargs.get('status', None)
+        self.trigger_type = kwargs.get('trigger_type', None)
+        self.action = kwargs.get('action', None)
+        self.recurrence = kwargs.get('recurrence', None)
+        self.cron = kwargs.get('cron', None)
+
+
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The minimum amount of CPU cores to be used by the container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu: float
+ :param cpu_limit: The maximum amount of CPU cores allowed to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu_limit: float
+ :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb: float
+ :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
+ container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb_limit: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = kwargs.get('cpu', None)
+ self.cpu_limit = kwargs.get('cpu_limit', None)
+ self.memory_in_gb = kwargs.get('memory_in_gb', None)
+ self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
+ self.gpu = kwargs.get('gpu', None)
+ self.fpga = kwargs.get('fpga', None)
+
+
+class CosmosDbSettings(msrest.serialization.Model):
+ """CosmosDbSettings.
+
+ :param collections_throughput: The throughput of the collections in cosmosdb database.
+ :type collections_throughput: int
+ """
+
+ _attribute_map = {
+ 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CosmosDbSettings, self).__init__(**kwargs)
+ self.collections_throughput = kwargs.get('collections_throughput', None)
+
+
+class Cron(msrest.serialization.Model):
+ """The workflow trigger cron for ComputeStartStop schedule type.
+
+ :param start_time: The start time.
+ :type start_time: str
+ :param time_zone: The time zone.
+ :type time_zone: str
+ :param expression: The cron expression.
+ :type expression: str
+ """
+
+ _attribute_map = {
+ 'start_time': {'key': 'startTime', 'type': 'str'},
+ 'time_zone': {'key': 'timeZone', 'type': 'str'},
+ 'expression': {'key': 'expression', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Cron, self).__init__(**kwargs)
+ self.start_time = kwargs.get('start_time', None)
+ self.time_zone = kwargs.get('time_zone', None)
+ self.expression = kwargs.get('expression', None)
+
+
+class CsvExportSummary(ExportSummary):
+ """CsvExportSummary.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar end_time_utc: The time when the export was completed.
+ :vartype end_time_utc: ~datetime.datetime
+ :ivar exported_row_count: The total number of labeled datapoints exported.
+ :vartype exported_row_count: long
+ :param format: Required. The format of exported labels, also as the discriminator.Constant
+ filled by server. Possible values include: "Dataset", "Coco", "CSV".
+ :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+ :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+ :vartype labeling_job_id: str
+ :ivar start_time_utc: The time when the export was requested.
+ :vartype start_time_utc: ~datetime.datetime
+ :ivar container_name: The container name to which the labels will be exported.
+ :vartype container_name: str
+ :ivar snapshot_path: The output path where the labels will be exported.
+ :vartype snapshot_path: str
+ """
+
+ _validation = {
+ 'end_time_utc': {'readonly': True},
+ 'exported_row_count': {'readonly': True},
+ 'format': {'required': True},
+ 'labeling_job_id': {'readonly': True},
+ 'start_time_utc': {'readonly': True},
+ 'container_name': {'readonly': True},
+ 'snapshot_path': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+ 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+ 'format': {'key': 'format', 'type': 'str'},
+ 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+ 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CsvExportSummary, self).__init__(**kwargs)
+ self.format = 'CSV' # type: str
+ self.container_name = None
+ self.snapshot_path = None
+
+
+class Databricks(Compute):
+    """A Databricks compute.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics", "SynapseSpark".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The time at which the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The time at which the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+     MSI and AAD exclusively for authentication.
+    :type disable_local_auth: bool
+    :param properties:
+    :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Databricks, self).__init__(**kwargs)
+        # Fixed discriminator for the Compute polymorphic hierarchy.
+        self.compute_type = 'Databricks'  # type: str
+        self.properties = kwargs.get('properties', None)
+
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: access token for databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ :param workspace_url: Workspace Url.
+ :type workspace_url: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+ self.workspace_url = kwargs.get('workspace_url', None)
+
+
+class DataContainer(msrest.serialization.Model):
+ """Container for data asset versions.
+
+ :param description: The asset description text.
+ :type description: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataContainer, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+
+
+class DataContainerResource(Resource):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.DataContainer
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'DataContainer'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataContainerResource, self).__init__(**kwargs)
+ self.properties = kwargs['properties']
+ self.system_data = None
+
+
+class DataContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of DataContainer entities.
+
+ :param next_link: The link to the next page of DataContainer objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type DataContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.DataContainerResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[DataContainerResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = kwargs.get('next_link', None)
+ self.value = kwargs.get('value', None)
+
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(**kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(**kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None)
+
+
+class DataPathAssetReference(AssetReferenceBase):
+ """Reference to an asset via its path in a datastore.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+ server. Possible values include: "Id", "DataPath", "OutputPath".
+ :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+ :param datastore_id: ARM resource ID of the datastore where the asset is located.
+ :type datastore_id: str
+ :param path: The path of the file/directory in the datastore.
+ :type path: str
+ """
+
+ _validation = {
+ 'reference_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'path': {'key': 'path', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataPathAssetReference, self).__init__(**kwargs)
+ self.reference_type = 'DataPath' # type: str
+ self.datastore_id = kwargs.get('datastore_id', None)
+ self.path = kwargs.get('path', None)
+
+
+class DatasetExportSummary(ExportSummary):
+ """DatasetExportSummary.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar end_time_utc: The time when the export was completed.
+ :vartype end_time_utc: ~datetime.datetime
+ :ivar exported_row_count: The total number of labeled datapoints exported.
+ :vartype exported_row_count: long
+ :param format: Required. The format of exported labels, also as the discriminator.Constant
+ filled by server. Possible values include: "Dataset", "Coco", "CSV".
+ :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+ :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+ :vartype labeling_job_id: str
+ :ivar start_time_utc: The time when the export was requested.
+ :vartype start_time_utc: ~datetime.datetime
+ :ivar labeled_asset_name: The unique name of the labeled data asset.
+ :vartype labeled_asset_name: str
+ """
+
+ _validation = {
+ 'end_time_utc': {'readonly': True},
+ 'exported_row_count': {'readonly': True},
+ 'format': {'required': True},
+ 'labeling_job_id': {'readonly': True},
+ 'start_time_utc': {'readonly': True},
+ 'labeled_asset_name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+ 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+ 'format': {'key': 'format', 'type': 'str'},
+ 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+ 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+ 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatasetExportSummary, self).__init__(**kwargs)
+ self.format = 'Dataset' # type: str
+ self.labeled_asset_name = None
+
+
+class DatastoreProperties(msrest.serialization.Model):
+ """Datastore definition.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents: Required. Reference to the datastore storage contents.
+ :type contents: ~azure_machine_learning_workspaces.models.DatastoreContents
+ :param description: The asset description text.
+ :type description: str
+ :ivar has_been_validated: Whether the service has validated access to the datastore with the
+ provided credentials.
+ :vartype has_been_validated: bool
+ :param is_default: Whether this datastore is the default for the workspace.
+ :type is_default: bool
+ :param linked_info: Information about the datastore origin, if linked.
+ :type linked_info: ~azure_machine_learning_workspaces.models.LinkedInfo
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _validation = {
+ 'contents': {'required': True},
+ 'has_been_validated': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'contents': {'key': 'contents', 'type': 'DatastoreContents'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'has_been_validated': {'key': 'hasBeenValidated', 'type': 'bool'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'linked_info': {'key': 'linkedInfo', 'type': 'LinkedInfo'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastoreProperties, self).__init__(**kwargs)
+ self.contents = kwargs['contents']
+ self.description = kwargs.get('description', None)
+ self.has_been_validated = None
+ self.is_default = kwargs.get('is_default', None)
+ self.linked_info = kwargs.get('linked_info', None)
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+
+
+class DatastorePropertiesResource(Resource):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.DatastoreProperties
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'DatastoreProperties'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastorePropertiesResource, self).__init__(**kwargs)
+ self.properties = kwargs['properties']
+ self.system_data = None
+
+
+class DatastorePropertiesResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of DatastoreProperties entities.
+
+ :param next_link: The link to the next page of DatastoreProperties objects. If null, there are
+ no additional pages.
+ :type next_link: str
+ :param value: An array of objects of type DatastoreProperties.
+ :type value: list[~azure_machine_learning_workspaces.models.DatastorePropertiesResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[DatastorePropertiesResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastorePropertiesResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = kwargs.get('next_link', None)
+ self.value = kwargs.get('value', None)
+
+
+class DataVersion(msrest.serialization.Model):
+ """Data asset version details.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param dataset_type: The Format of dataset. Possible values include: "Simple", "Dataflow".
+ :type dataset_type: str or ~azure_machine_learning_workspaces.models.DatasetType
+ :param datastore_id: ARM resource ID of the datastore where the asset is located.
+ :type datastore_id: str
+ :param description: The asset description text.
+ :type description: str
+ :param is_anonymous: If the name version are system generated (anonymous registration).
+ :type is_anonymous: bool
+ :param path: Required. The path of the file/directory in the datastore.
+ :type path: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _validation = {
+ 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'dataset_type': {'key': 'datasetType', 'type': 'str'},
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
+ 'path': {'key': 'path', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataVersion, self).__init__(**kwargs)
+ self.dataset_type = kwargs.get('dataset_type', None)
+ self.datastore_id = kwargs.get('datastore_id', None)
+ self.description = kwargs.get('description', None)
+ self.is_anonymous = kwargs.get('is_anonymous', None)
+ self.path = kwargs['path']
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+
+
class DataVersionResource(Resource):
    """ARM resource envelope wrapping a DataVersion entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.DataVersion
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'DataVersion'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(DataVersionResource, self).__init__(**kwargs)
        # Required payload: a missing 'properties' raises KeyError at construction time.
        self.properties = kwargs['properties']
        # Server-populated, read-only.
        self.system_data = None
+
+
class DataVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of DataVersion entities.

    :param next_link: The link to the next page of DataVersion objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type DataVersion.
    :type value: list[~azure_machine_learning_workspaces.models.DataVersionResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[DataVersionResource]'},
    }

    def __init__(self, **kwargs):
        super(DataVersionResourceArmPaginatedResult, self).__init__(**kwargs)
        # Both fields are optional; absent keys default to None.
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class DeploymentLogs(msrest.serialization.Model):
    """Log payload returned for an online deployment.

    :param content: The retrieved online deployment logs.
    :type content: str
    """

    _attribute_map = {
        'content': {'key': 'content', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeploymentLogs, self).__init__(**kwargs)
        self.content = kwargs.get('content')
+
+
class DeploymentLogsRequest(msrest.serialization.Model):
    """Request body for retrieving online deployment logs.

    :param container_type: The type of container to retrieve logs from. Possible values include:
     "StorageInitializer", "InferenceServer".
    :type container_type: str or ~azure_machine_learning_workspaces.models.ContainerType
    :param tail: The maximum number of lines to tail.
    :type tail: int
    """

    _attribute_map = {
        'container_type': {'key': 'containerType', 'type': 'str'},
        'tail': {'key': 'tail', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(DeploymentLogsRequest, self).__init__(**kwargs)
        self.container_type = kwargs.get('container_type')
        self.tail = kwargs.get('tail')
+
+
class DistributionConfiguration(msrest.serialization.Model):
    """Base definition for job distribution configuration.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: Mpi, PyTorch, TensorFlow.

    All required parameters must be populated in order to send to Azure.

    :param distribution_type: Required. Specifies the type of distribution framework.Constant
     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
    """

    _validation = {
        'distribution_type': {'required': True},
    }

    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
    }

    # Discriminator map: the serialized 'distributionType' value selects the concrete subclass.
    _subtype_map = {
        'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'}
    }

    def __init__(self, **kwargs):
        super(DistributionConfiguration, self).__init__(**kwargs)
        # Set by each subclass constructor; None on the abstract base.
        self.distribution_type = None  # type: Optional[str]
+
+
class DockerSpecification(msrest.serialization.Model):
    """Configuration settings for Docker.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: DockerBuild, DockerImage.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either Build or
     Image.Constant filled by server. Possible values include: "Build", "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    """

    _validation = {
        'docker_specification_type': {'required': True},
    }

    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
    }

    # Discriminator map: 'dockerSpecificationType' chooses the concrete subclass.
    _subtype_map = {
        'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'}
    }

    def __init__(self, **kwargs):
        super(DockerSpecification, self).__init__(**kwargs)
        # Filled in by the concrete subclass.
        self.docker_specification_type = None  # type: Optional[str]
        self.platform = kwargs.get('platform')
+
+
class DockerBuild(DockerSpecification):
    """Docker configuration that builds an image from a Dockerfile.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either Build or
     Image.Constant filled by server. Possible values include: "Build", "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    :param context: Path to a snapshot of the Docker Context. Only valid when a Dockerfile is
     specified; the path is relative to the asset path, which must contain a single Blob URI
     value.
    :type context: str
    :param dockerfile: Required. Docker command line instructions to assemble an image.
    :type dockerfile: str
    """

    _validation = {
        'docker_specification_type': {'required': True},
        'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
        'context': {'key': 'context', 'type': 'str'},
        'dockerfile': {'key': 'dockerfile', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DockerBuild, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.docker_specification_type = 'Build'  # type: str
        self.context = kwargs.get('context')
        # Required; KeyError if omitted by the caller.
        self.dockerfile = kwargs['dockerfile']
+
+
class DockerImage(DockerSpecification):
    """Docker configuration that references a prebuilt image.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either Build or
     Image.Constant filled by server. Possible values include: "Build", "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    :param docker_image_uri: Required. Image name of a custom base image.
    :type docker_image_uri: str
    """

    _validation = {
        'docker_specification_type': {'required': True},
        'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
        'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DockerImage, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.docker_specification_type = 'Image'  # type: str
        # Required; KeyError if omitted by the caller.
        self.docker_image_uri = kwargs['docker_image_uri']
+
+
class DockerImagePlatform(msrest.serialization.Model):
    """Platform information of a Docker image.

    :param operating_system_type: The OS type the Environment. Possible values include: "Linux",
     "Windows".
    :type operating_system_type: str or
     ~azure_machine_learning_workspaces.models.OperatingSystemType
    """

    _attribute_map = {
        'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DockerImagePlatform, self).__init__(**kwargs)
        self.operating_system_type = kwargs.get('operating_system_type')
+
+
class EncryptionProperty(msrest.serialization.Model):
    """Customer-managed-key encryption settings for a workspace.

    All required parameters must be populated in order to send to Azure.

    :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
     Possible values include: "Enabled", "Disabled".
    :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
    :param identity: The identity that will be used to access the key vault for encryption at rest.
    :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
    :param key_vault_properties: Required. Customer Key vault properties.
    :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
    """

    _validation = {
        'status': {'required': True},
        'key_vault_properties': {'required': True},
    }

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
        'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
    }

    def __init__(self, **kwargs):
        super(EncryptionProperty, self).__init__(**kwargs)
        # 'status' and 'key_vault_properties' are required: indexing raises KeyError if absent.
        self.status = kwargs['status']
        self.identity = kwargs.get('identity')
        self.key_vault_properties = kwargs['key_vault_properties']
+
+
class EndpointAuthKeys(msrest.serialization.Model):
    """Primary/secondary key pair for endpoint authentication.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EndpointAuthKeys, self).__init__(**kwargs)
        self.primary_key = kwargs.get('primary_key')
        self.secondary_key = kwargs.get('secondary_key')
+
+
class EndpointAuthToken(msrest.serialization.Model):
    """Service access token for endpoint authentication.

    :param access_token: Access token.
    :type access_token: str
    :param expiry_time_utc: Access token expiry time (UTC).
    :type expiry_time_utc: long
    :param refresh_after_time_utc: Refresh access token after time (UTC).
    :type refresh_after_time_utc: long
    :param token_type: Access token type.
    :type token_type: str
    """

    _attribute_map = {
        'access_token': {'key': 'accessToken', 'type': 'str'},
        'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'},
        'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'},
        'token_type': {'key': 'tokenType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EndpointAuthToken, self).__init__(**kwargs)
        self.access_token = kwargs.get('access_token')
        self.expiry_time_utc = kwargs.get('expiry_time_utc')
        self.refresh_after_time_utc = kwargs.get('refresh_after_time_utc')
        self.token_type = kwargs.get('token_type')
+
+
class EnvironmentContainer(msrest.serialization.Model):
    """Container grouping environment specification versions.

    :param description: The asset description text.
    :type description: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentContainer, self).__init__(**kwargs)
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class EnvironmentContainerResource(Resource):
    """ARM resource envelope wrapping an EnvironmentContainer entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.EnvironmentContainer
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentContainer'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentContainerResource, self).__init__(**kwargs)
        # Required payload; KeyError if omitted.
        self.properties = kwargs['properties']
        # Server-populated, read-only.
        self.system_data = None
+
+
class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of EnvironmentContainer entities.

    :param next_link: The link to the next page of EnvironmentContainer objects. If null, there are
     no additional pages.
    :type next_link: str
    :param value: An array of objects of type EnvironmentContainer.
    :type value: list[~azure_machine_learning_workspaces.models.EnvironmentContainerResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[EnvironmentContainerResource]'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class EnvironmentSpecificationVersion(msrest.serialization.Model):
    """Environment specification version details.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param conda_file: Standard configuration file used by Conda that lets you install any kind of
     package, including Python, R, and C/C++ packages.
    :type conda_file: str
    :param description: The asset description text.
    :type description: str
    :param docker: Configuration settings for Docker.
    :type docker: ~azure_machine_learning_workspaces.models.DockerSpecification
    :ivar environment_specification_type: Environment specification is either user managed or
     curated by the Azure ML service. Possible values include: "Curated", "UserCreated".
    :vartype environment_specification_type: str or
     ~azure_machine_learning_workspaces.models.EnvironmentSpecificationType
    :param inference_container_properties: Defines configuration specific to inference.
    :type inference_container_properties:
     ~azure_machine_learning_workspaces.models.InferenceContainerProperties
    :param is_anonymous: If the name version are system generated (anonymous registration).
    :type is_anonymous: bool
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _validation = {
        'environment_specification_type': {'readonly': True},
    }

    _attribute_map = {
        'conda_file': {'key': 'condaFile', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'docker': {'key': 'docker', 'type': 'DockerSpecification'},
        'environment_specification_type': {'key': 'environmentSpecificationType', 'type': 'str'},
        'inference_container_properties': {'key': 'inferenceContainerProperties', 'type': 'InferenceContainerProperties'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersion, self).__init__(**kwargs)
        self.conda_file = kwargs.get('conda_file')
        self.description = kwargs.get('description')
        self.docker = kwargs.get('docker')
        # Read-only; set by the service, never by callers.
        self.environment_specification_type = None
        self.inference_container_properties = kwargs.get('inference_container_properties')
        self.is_anonymous = kwargs.get('is_anonymous')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class EnvironmentSpecificationVersionResource(Resource):
    """ARM resource envelope wrapping an EnvironmentSpecificationVersion entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersion
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentSpecificationVersion'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersionResource, self).__init__(**kwargs)
        # Required payload; KeyError if omitted.
        self.properties = kwargs['properties']
        # Server-populated, read-only.
        self.system_data = None
+
+
class EnvironmentSpecificationVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of EnvironmentSpecificationVersion entities.

    :param next_link: The link to the next page of EnvironmentSpecificationVersion objects. If
     null, there are no additional pages.
    :type next_link: str
    :param value: An array of objects of type EnvironmentSpecificationVersion.
    :type value:
     list[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[EnvironmentSpecificationVersionResource]'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersionResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class ErrorAdditionalInfo(msrest.serialization.Model):
    """The resource management error additional info.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: object
    """

    _validation = {
        'type': {'readonly': True},
        'info': {'readonly': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'info': {'key': 'info', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(ErrorAdditionalInfo, self).__init__(**kwargs)
        # Both attributes are read-only and filled by the service on deserialization.
        self.type = None
        self.info = None
+
+
class ErrorDetail(msrest.serialization.Model):
    """The error detail.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: The error code.
    :vartype code: str
    :ivar message: The error message.
    :vartype message: str
    :ivar target: The error target.
    :vartype target: str
    :ivar details: The error details.
    :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~azure_machine_learning_workspaces.models.ErrorAdditionalInfo]
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
        'details': {'readonly': True},
        'additional_info': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[ErrorDetail]'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(self, **kwargs):
        super(ErrorDetail, self).__init__(**kwargs)
        # Every attribute is read-only and filled by the service on deserialization.
        self.code = None
        self.message = None
        self.target = None
        self.details = None
        self.additional_info = None
+
+
class ErrorResponse(msrest.serialization.Model):
    """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).

    :param error: The error object.
    :type error: ~azure_machine_learning_workspaces.models.ErrorDetail
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'ErrorDetail'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponse, self).__init__(**kwargs)
        self.error = kwargs.get('error')
+
+
class EstimatedVmPrice(msrest.serialization.Model):
    """The estimated price info for using a VM of a particular OS type, tier, etc.

    All required parameters must be populated in order to send to Azure.

    :param retail_price: Required. The price charged for using the VM.
    :type retail_price: float
    :param os_type: Required. Operating system type used by the VM. Possible values include:
     "Linux", "Windows".
    :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
    :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
     "LowPriority", "Spot".
    :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
    """

    _validation = {
        'retail_price': {'required': True},
        'os_type': {'required': True},
        'vm_tier': {'required': True},
    }

    _attribute_map = {
        'retail_price': {'key': 'retailPrice', 'type': 'float'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'vm_tier': {'key': 'vmTier', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EstimatedVmPrice, self).__init__(**kwargs)
        # All three fields are required: indexing raises KeyError when one is missing.
        self.retail_price = kwargs['retail_price']
        self.os_type = kwargs['os_type']
        self.vm_tier = kwargs['vm_tier']
+
+
class EstimatedVmPrices(msrest.serialization.Model):
    """The estimated price info for using a VM.

    All required parameters must be populated in order to send to Azure.

    :param billing_currency: Required. Three lettered code specifying the currency of the VM price.
     Example: USD. Possible values include: "USD".
    :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
    :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
     Example: OneHour. Possible values include: "OneHour".
    :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
    :param values: Required. The list of estimated prices for using a VM of a particular OS type,
     tier, etc.
    :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
    """

    _validation = {
        'billing_currency': {'required': True},
        'unit_of_measure': {'required': True},
        'values': {'required': True},
    }

    _attribute_map = {
        'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
        'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
        'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
    }

    def __init__(self, **kwargs):
        super(EstimatedVmPrices, self).__init__(**kwargs)
        # All three fields are required: indexing raises KeyError when one is missing.
        self.billing_currency = kwargs['billing_currency']
        self.unit_of_measure = kwargs['unit_of_measure']
        self.values = kwargs['values']
+
+
class FlavorData(msrest.serialization.Model):
    """Model flavor-specific data container.

    :param data: Model flavor-specific data.
    :type data: dict[str, str]
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(FlavorData, self).__init__(**kwargs)
        self.data = kwargs.get('data')
+
+
class GlusterFsContents(DatastoreContents):
    """GlusterFs datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
     Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
     "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
    :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
    :param server_address: Required. GlusterFS server address (can be the IP address or server
     name).
    :type server_address: str
    :param volume_name: Required. GlusterFS volume name.
    :type volume_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'server_address': {'key': 'serverAddress', 'type': 'str'},
        'volume_name': {'key': 'volumeName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GlusterFsContents, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.contents_type = 'GlusterFs'  # type: str
        # Required; KeyError if either is omitted.
        self.server_address = kwargs['server_address']
        self.volume_name = kwargs['volume_name']
+
+
class HdInsight(Compute):
    """A HDInsight compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
    }

    def __init__(self, **kwargs):
        super(HdInsight, self).__init__(**kwargs)
        # Fix the polymorphic discriminator; the Compute base handles the shared fields.
        self.compute_type = 'HDInsight'  # type: str
        self.properties = kwargs.get('properties')
+
+
class HdInsightProperties(msrest.serialization.Model):
    """Connection properties of an HDInsight cluster's master node.

    :param ssh_port: Port open for ssh connections on the master node of the cluster.
    :type ssh_port: int
    :param address: Public IP address of the master node of the cluster.
    :type address: str
    :param administrator_account: Admin credentials for master node of the cluster.
    :type administrator_account:
     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
    """

    _attribute_map = {
        'ssh_port': {'key': 'sshPort', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
    }

    def __init__(self, **kwargs):
        super(HdInsightProperties, self).__init__(**kwargs)
        self.ssh_port = kwargs.get('ssh_port')
        self.address = kwargs.get('address')
        self.administrator_account = kwargs.get('administrator_account')
+
+
class IdAssetReference(AssetReferenceBase):
    """Reference to an asset via its ARM resource ID.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
     server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param asset_id: Required. ARM resource ID of the asset.
    :type asset_id: str
    """

    _validation = {
        'reference_type': {'required': True},
        'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdAssetReference, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.reference_type = 'Id'  # type: str
        # Required; KeyError if omitted.
        self.asset_id = kwargs['asset_id']
+
+
class Identity(msrest.serialization.Model):
    """Identity for the resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar principal_id: The principal ID of resource identity.
    :vartype principal_id: str
    :ivar tenant_id: The tenant ID of resource.
    :vartype tenant_id: str
    :param type: The identity type. Possible values include: "SystemAssigned",
     "SystemAssigned,UserAssigned", "UserAssigned", "None".
    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
    :param user_assigned_identities: The user assigned identities associated with the resource.
    :type user_assigned_identities: dict[str,
     ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
    """

    _validation = {
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
    }

    def __init__(self, **kwargs):
        super(Identity, self).__init__(**kwargs)
        # Read-only; the service fills these on responses.
        self.principal_id = None
        self.tenant_id = None
        self.type = kwargs.get('type')
        self.user_assigned_identities = kwargs.get('user_assigned_identities')
+
+
class IdentityForCmk(msrest.serialization.Model):
    """Identity used to reach the key vault holding the customer-managed encryption key.

    :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
     access the customer managed key vault.
    :type user_assigned_identity: str
    """

    _attribute_map = {
        'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdentityForCmk, self).__init__(**kwargs)
        # Optional; defaults to None when not supplied.
        self.user_assigned_identity = kwargs.get('user_assigned_identity')
+
+
class InferenceContainerProperties(msrest.serialization.Model):
    """Probe and scoring routes exposed by an inference server container.

    :param liveness_route: The route to check the liveness of the inference server container.
    :type liveness_route: ~azure_machine_learning_workspaces.models.Route
    :param readiness_route: The route to check the readiness of the inference server container.
    :type readiness_route: ~azure_machine_learning_workspaces.models.Route
    :param scoring_route: The port to send the scoring requests to, within the inference server
     container.
    :type scoring_route: ~azure_machine_learning_workspaces.models.Route
    """

    _attribute_map = {
        'liveness_route': {'key': 'livenessRoute', 'type': 'Route'},
        'readiness_route': {'key': 'readinessRoute', 'type': 'Route'},
        'scoring_route': {'key': 'scoringRoute', 'type': 'Route'},
    }

    def __init__(self, **kwargs):
        super(InferenceContainerProperties, self).__init__(**kwargs)
        # All three routes are optional and default to None.
        for route in ('liveness_route', 'readiness_route', 'scoring_route'):
            setattr(self, route, kwargs.get(route))
+
+
class InputDataBinding(msrest.serialization.Model):
    """Binding of a registered data asset to a job input.

    :param data_id: ARM resource ID of the registered dataVersion.
    :type data_id: str
    :param mode: Mechanism for accessing the data artifact. Possible values include: "Mount",
     "Download", "Upload".
    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
    :param path_on_compute: Location of data inside the container process.
    :type path_on_compute: str
    """

    _attribute_map = {
        'data_id': {'key': 'dataId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(InputDataBinding, self).__init__(**kwargs)
        # Every field is optional; absent ones stay None.
        for field in ('data_id', 'mode', 'path_on_compute'):
            setattr(self, field, kwargs.get(field))
+
+
class JobBaseResource(Resource):
    """Azure Resource Manager resource envelope for a JobBase entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.JobBase
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'JobBase'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(JobBaseResource, self).__init__(**kwargs)
        # 'properties' is mandatory; a missing value raises KeyError.
        self.properties = kwargs['properties']
        # Populated by the service, never by the client.
        self.system_data = None
+
+
class JobBaseResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of JobBase entities.

    :param next_link: The link to the next page of JobBase objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type JobBase.
    :type value: list[~azure_machine_learning_workspaces.models.JobBaseResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[JobBaseResource]'},
    }

    def __init__(self, **kwargs):
        super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs)
        # Both the continuation token and the page contents are optional.
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class JobEndpoint(msrest.serialization.Model):
    """Definition of a single job endpoint.

    :param endpoint: Url for endpoint.
    :type endpoint: str
    :param job_endpoint_type: Endpoint type.
    :type job_endpoint_type: str
    :param port: Port for endpoint.
    :type port: int
    :param properties: Additional properties to set on the endpoint.
    :type properties: dict[str, str]
    """

    _attribute_map = {
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'job_endpoint_type': {'key': 'jobEndpointType', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(JobEndpoint, self).__init__(**kwargs)
        # All fields are optional caller-supplied values.
        for field in ('endpoint', 'job_endpoint_type', 'port', 'properties'):
            setattr(self, field, kwargs.get(field))
+
+
class JobOutput(msrest.serialization.Model):
    """Location information for a job's output and logs.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar datastore_id: ARM ID of the datastore where the job logs and artifacts are stored, or
     null for the default container ("azureml") in the workspace's storage account.
    :vartype datastore_id: str
    :ivar path: Path within the datastore to the job logs and artifacts.
    :vartype path: str
    """

    _validation = {
        'datastore_id': {'readonly': True},
        'path': {'readonly': True},
    }

    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(JobOutput, self).__init__(**kwargs)
        # Entirely server-populated; the client never sets these.
        self.datastore_id = None
        self.path = None
+
+
class OnlineDeployment(msrest.serialization.Model):
    """Base settings shared by all online endpoint deployments.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: K8SOnlineDeployment, ManagedOnlineDeployment.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: If true, enables Application Insights logging.
    :type app_insights_enabled: bool
    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
    :param description: Description of the endpoint deployment.
    :type description: str
    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant filled by
     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
    :type endpoint_compute_type: str or
     ~azure_machine_learning_workspaces.models.EndpointComputeType
    :param environment_id: ARM resource ID of the environment specification for the endpoint
     deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the deployment.
    :type environment_variables: dict[str, str]
    :param liveness_probe: Deployment container liveness/readiness probe configuration.
    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    :param model: Reference to the model asset for the endpoint deployment.
    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
    """

    _validation = {
        'endpoint_compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
    }

    _subtype_map = {
        'endpoint_compute_type': {'K8S': 'K8SOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'}
    }

    def __init__(self, **kwargs):
        super(OnlineDeployment, self).__init__(**kwargs)
        # Optional caller-supplied settings; absent ones remain None.
        for field in (
            'app_insights_enabled', 'code_configuration', 'description',
            'environment_id', 'environment_variables', 'liveness_probe',
            'model', 'properties', 'request_settings', 'scale_settings',
        ):
            setattr(self, field, kwargs.get(field))
        # Discriminator: concrete subclasses overwrite this with their constant.
        self.endpoint_compute_type = None  # type: Optional[str]
        # Read-only; populated by the service.
        self.provisioning_state = None
+
+
class K8SOnlineDeployment(OnlineDeployment):
    """Online deployment targeting a Kubernetes compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: If true, enables Application Insights logging.
    :type app_insights_enabled: bool
    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
    :param description: Description of the endpoint deployment.
    :type description: str
    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant filled by
     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
    :type endpoint_compute_type: str or
     ~azure_machine_learning_workspaces.models.EndpointComputeType
    :param environment_id: ARM resource ID of the environment specification for the endpoint
     deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the deployment.
    :type environment_variables: dict[str, str]
    :param liveness_probe: Deployment container liveness/readiness probe configuration.
    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    :param model: Reference to the model asset for the endpoint deployment.
    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
    :param properties: Property dictionary. Properties can be added, but not removed or altered.
    :type properties: dict[str, str]
    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
    :param container_resource_requirements: Resource requirements for each container instance
     within an online deployment.
    :type container_resource_requirements:
     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
    """

    _validation = {
        'endpoint_compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
    }

    def __init__(self, **kwargs):
        super(K8SOnlineDeployment, self).__init__(**kwargs)
        # Pin the polymorphic discriminator for this subtype.
        self.endpoint_compute_type = 'K8S'  # type: str
        # Per-container resource limits; optional.
        self.container_resource_requirements = kwargs.get('container_resource_requirements')
+
+
class KeyVaultProperties(msrest.serialization.Model):
    """Customer-managed-key vault settings for encryption at rest.

    All required parameters must be populated in order to send to Azure.

    :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
     encryption key is present.
    :type key_vault_arm_id: str
    :param key_identifier: Required. Key vault uri to access the encryption key.
    :type key_identifier: str
    :param identity_client_id: For future use - The client id of the identity which will be used to
     access key vault.
    :type identity_client_id: str
    """

    _validation = {
        'key_vault_arm_id': {'required': True},
        'key_identifier': {'required': True},
    }

    _attribute_map = {
        'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
        'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
        'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultProperties, self).__init__(**kwargs)
        # Both vault references are mandatory; missing values raise KeyError.
        self.key_vault_arm_id = kwargs['key_vault_arm_id']
        self.key_identifier = kwargs['key_identifier']
        # Reserved for future use; optional today.
        self.identity_client_id = kwargs.get('identity_client_id')
+
+
class LabelCategory(msrest.serialization.Model):
    """Definition of one label category in a labeling job.

    :param allow_multi_select: Indicates whether it is allowed to select multiple classes in this
     category.
    :type allow_multi_select: bool
    :param classes: Dictionary of label classes in this category.
    :type classes: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
    :param display_name: Display name of the label category.
    :type display_name: str
    """

    _attribute_map = {
        'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'},
        'classes': {'key': 'classes', 'type': '{LabelClass}'},
        'display_name': {'key': 'displayName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LabelCategory, self).__init__(**kwargs)
        # All fields are optional.
        for field in ('allow_multi_select', 'classes', 'display_name'):
            setattr(self, field, kwargs.get(field))
+
+
class LabelClass(msrest.serialization.Model):
    """Definition of one label class, possibly with nested subclasses.

    :param display_name: Display name of the label class.
    :type display_name: str
    :param subclasses: Dictionary of subclasses of the label class.
    :type subclasses: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'},
    }

    def __init__(self, **kwargs):
        super(LabelClass, self).__init__(**kwargs)
        # Both fields are optional.
        self.display_name = kwargs.get('display_name')
        self.subclasses = kwargs.get('subclasses')
+
+
class LabelingDatasetConfiguration(msrest.serialization.Model):
    """Configuration of the dataset used by a labeling job.

    :param asset_name: Name of the data asset to perform labeling.
    :type asset_name: str
    :param dataset_version: AML dataset version.
    :type dataset_version: str
    :param incremental_dataset_refresh_enabled: Indicates whether to enable incremental dataset
     refresh.
    :type incremental_dataset_refresh_enabled: bool
    """

    _attribute_map = {
        'asset_name': {'key': 'assetName', 'type': 'str'},
        'dataset_version': {'key': 'datasetVersion', 'type': 'str'},
        'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(LabelingDatasetConfiguration, self).__init__(**kwargs)
        # All fields are optional caller-supplied values.
        for field in ('asset_name', 'dataset_version',
                      'incremental_dataset_refresh_enabled'):
            setattr(self, field, kwargs.get(field))
+
+
class LabelingJob(msrest.serialization.Model):
    """Definition of a data-labeling job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar created_time_utc: Created time of the job in UTC timezone.
    :vartype created_time_utc: ~datetime.datetime
    :param dataset_configuration: Configuration of dataset used in the job.
    :type dataset_configuration:
     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
    :param description: The asset description text.
    :type description: str
    :ivar interaction_endpoints: List of JobEndpoints.
     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
    :vartype interaction_endpoints: dict[str,
     ~azure_machine_learning_workspaces.models.JobEndpoint]
    :param job_instructions: Labeling instructions of the job.
    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
    :param job_type: Required. Specifies the type of job. This field should always be set to
     "Labeling". Possible values include: "Command", "Sweep", "Labeling".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :param label_categories: Label categories of the job.
    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
    :param labeling_job_media_properties: Media type specific properties in the job.
    :type labeling_job_media_properties:
     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
    :ivar progress_metrics: Progress metrics of the job.
    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
    :ivar project_id: Internal id of the job(Previously called project).
    :vartype project_id: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :ivar provisioning_state: Specifies the labeling job provisioning state. Possible values
     include: "Succeeded", "Failed", "Canceled", "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
    :ivar status_messages: Status messages of the job.
    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _validation = {
        'created_time_utc': {'readonly': True},
        'interaction_endpoints': {'readonly': True},
        'job_type': {'required': True},
        'progress_metrics': {'readonly': True},
        'project_id': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'status_messages': {'readonly': True},
    }

    _attribute_map = {
        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
        'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'},
        'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
        'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
        'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'},
        'project_id': {'key': 'projectId', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(LabelingJob, self).__init__(**kwargs)
        # Mandatory field; a missing value surfaces as a KeyError.
        self.job_type = kwargs['job_type']
        # Optional caller-supplied settings.
        for field in (
            'dataset_configuration', 'description', 'job_instructions',
            'label_categories', 'labeling_job_media_properties',
            'ml_assist_configuration', 'properties', 'tags',
        ):
            setattr(self, field, kwargs.get(field))
        # Read-only fields populated by the service.
        for field in (
            'created_time_utc', 'interaction_endpoints', 'progress_metrics',
            'project_id', 'provisioning_state', 'status', 'status_messages',
        ):
            setattr(self, field, None)
+
+
class LabelingJobMediaProperties(msrest.serialization.Model):
    """Media-type-specific properties of a labeling job (polymorphic base).

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties.

    All required parameters must be populated in order to send to Azure.

    :param media_type: Required. Media type of the job. Constant filled by server. Possible values
     include: "Image", "Text".
    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
    """

    _validation = {
        'media_type': {'required': True},
    }

    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
    }

    _subtype_map = {
        'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'}
    }

    def __init__(self, **kwargs):
        super(LabelingJobMediaProperties, self).__init__(**kwargs)
        # Discriminator; concrete subclasses overwrite this with their constant.
        self.media_type = None  # type: Optional[str]
+
+
class LabelingJobImageProperties(LabelingJobMediaProperties):
    """Labeling-job properties specific to image data.

    All required parameters must be populated in order to send to Azure.

    :param media_type: Required. Media type of the job. Constant filled by server. Possible values
     include: "Image", "Text".
    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
    :param annotation_type: Annotation type of image labeling job. Possible values include:
     "Classification", "BoundingBox", "InstanceSegmentation".
    :type annotation_type: str or ~azure_machine_learning_workspaces.models.ImageAnnotationType
    """

    _validation = {
        'media_type': {'required': True},
    }

    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'annotation_type': {'key': 'annotationType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LabelingJobImageProperties, self).__init__(**kwargs)
        # Pin the polymorphic discriminator for this subtype.
        self.media_type = 'Image'  # type: str
        self.annotation_type = kwargs.get('annotation_type')
+
+
class LabelingJobInstructions(msrest.serialization.Model):
    """Pointer to the labeling instructions for a labeling job.

    :param uri: The link to a page with detailed labeling instructions for labelers.
    :type uri: str
    """

    _attribute_map = {
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LabelingJobInstructions, self).__init__(**kwargs)
        # Optional; defaults to None when not supplied.
        self.uri = kwargs.get('uri')
+
+
class LabelingJobResource(Resource):
    """Azure Resource Manager resource envelope for a LabelingJob entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.LabelingJob
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'LabelingJob'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(LabelingJobResource, self).__init__(**kwargs)
        # 'properties' is mandatory; a missing value raises KeyError.
        self.properties = kwargs['properties']
        # Populated by the service, never by the client.
        self.system_data = None
+
+
class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of LabelingJob entities.

    :param next_link: The link to the next page of LabelingJob objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type LabelingJob.
    :type value: list[~azure_machine_learning_workspaces.models.LabelingJobResource]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[LabelingJobResource]'},
    }

    def __init__(self, **kwargs):
        super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs)
        # Both the continuation token and the page contents are optional.
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class LabelingJobTextProperties(LabelingJobMediaProperties):
    """Labeling-job properties specific to text data.

    All required parameters must be populated in order to send to Azure.

    :param media_type: Required. Media type of the job. Constant filled by server. Possible values
     include: "Image", "Text".
    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
    :param annotation_type: Annotation type of text labeling job. Possible values include:
     "Classification".
    :type annotation_type: str or ~azure_machine_learning_workspaces.models.TextAnnotationType
    """

    _validation = {
        'media_type': {'required': True},
    }

    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'annotation_type': {'key': 'annotationType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LabelingJobTextProperties, self).__init__(**kwargs)
        # Pin the polymorphic discriminator for this subtype.
        self.media_type = 'Text'  # type: str
        self.annotation_type = kwargs.get('annotation_type')
+
+
class LinkedInfo(msrest.serialization.Model):
    """Origin details for a datastore that was created through a linked service.

    :param linked_id: Linked service ID.
    :type linked_id: str
    :param linked_resource_name: Linked service resource name.
    :type linked_resource_name: str
    :param origin: Type of the linked service. Possible values include: "Synapse".
    :type origin: str or ~azure_machine_learning_workspaces.models.OriginType
    """

    _attribute_map = {
        'linked_id': {'key': 'linkedId', 'type': 'str'},
        'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'},
        'origin': {'key': 'origin', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LinkedInfo, self).__init__(**kwargs)
        # All fields are optional.
        for field in ('linked_id', 'linked_resource_name', 'origin'):
            setattr(self, field, kwargs.get(field))
+
+
class ListAmlUserFeatureResult(msrest.serialization.Model):
    """Response of the List AML user feature operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The list of AML user facing features.
    :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
    :ivar next_link: The URI to fetch the next page of AML user features information. Call
     ListNext() with this to fetch the next page of AML user features information.
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[AmlUserFeature]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ListAmlUserFeatureResult, self).__init__(**kwargs)
        # Entirely server-populated; the client never sets these.
        self.value = None
        self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ManagedIdentity(IdentityConfiguration):
+    """Managed identity configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+     server. Possible values include: "Managed", "AMLToken".
+    :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+    :param client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not
+     set this field.
+    :type client_id: str
+    :param object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not
+     set this field.
+    :type object_id: str
+    :param resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned,
+     do not set this field.
+    :type resource_id: str
+    """
+
+    # identity_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'identity_type': {'required': True},
+    }
+
+    # Maps attribute names to the REST API (camelCase) field names and msrest types.
+    _attribute_map = {
+        'identity_type': {'key': 'identityType', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'object_id': {'key': 'objectId', 'type': 'str'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ManagedIdentity, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype; callers cannot override it.
+        self.identity_type = 'Managed'  # type: str
+        self.client_id = kwargs.get('client_id', None)
+        self.object_id = kwargs.get('object_id', None)
+        self.resource_id = kwargs.get('resource_id', None)
+
+
+class ManagedOnlineDeployment(OnlineDeployment):
+    """ManagedOnlineDeployment.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param app_insights_enabled: If true, enables Application Insights logging.
+    :type app_insights_enabled: bool
+    :param code_configuration: Code configuration for the endpoint deployment.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param description: Description of the endpoint deployment.
+    :type description: str
+    :param endpoint_compute_type: Required. The compute type of the endpoint.Constant filled by
+     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+    :type endpoint_compute_type: str or
+     ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param environment_id: ARM resource ID of the environment specification for the endpoint
+     deployment.
+    :type environment_id: str
+    :param environment_variables: Environment variables configuration for the deployment.
+    :type environment_variables: dict[str, str]
+    :param liveness_probe: Deployment container liveness/readiness probe configuration.
+    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    :param model: Reference to the model asset for the endpoint deployment.
+    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
+     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
+    :param request_settings: Online deployment scoring requests configuration.
+    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+    :param scale_settings: Online deployment scaling configuration.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+    :param instance_type: Compute instance type.
+    :type instance_type: str
+    :param readiness_probe: Deployment container liveness/readiness probe configuration.
+    :type readiness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    """
+
+    # endpoint_compute_type is the polymorphic discriminator; provisioning_state is
+    # server-populated and rejected on requests.
+    _validation = {
+        'endpoint_compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Full wire mapping, including fields serialized by the OnlineDeployment base.
+    _attribute_map = {
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+        'description': {'key': 'description', 'type': 'str'},
+        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+        'instance_type': {'key': 'instanceType', 'type': 'str'},
+        'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # Shared fields (description, model, scale_settings, ...) are set by the base;
+        # only subtype-specific fields are assigned here.
+        super(ManagedOnlineDeployment, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype; callers cannot override it.
+        self.endpoint_compute_type = 'Managed'  # type: str
+        self.instance_type = kwargs.get('instance_type', None)
+        self.readiness_probe = kwargs.get('readiness_probe', None)
+
+
+class ManualScaleSettings(OnlineScaleSettings):
+    """ManualScaleSettings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param max_instances: Maximum number of instances for this deployment.
+    :type max_instances: int
+    :param min_instances: Minimum number of instances for this deployment.
+    :type min_instances: int
+    :param scale_type: Required. Type of deployment scaling algorithm.Constant filled by server.
+     Possible values include: "Auto", "Manual".
+    :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+    :param instance_count: Fixed number of instances for this deployment.
+    :type instance_count: int
+    """
+
+    # scale_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'scale_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'max_instances': {'key': 'maxInstances', 'type': 'int'},
+        'min_instances': {'key': 'minInstances', 'type': 'int'},
+        'scale_type': {'key': 'scaleType', 'type': 'str'},
+        'instance_count': {'key': 'instanceCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # max_instances / min_instances are handled by the OnlineScaleSettings base.
+        super(ManualScaleSettings, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype; callers cannot override it.
+        self.scale_type = 'Manual'  # type: str
+        self.instance_count = kwargs.get('instance_count', None)
+
+
+class MedianStoppingPolicy(EarlyTerminationPolicy):
+    """Defines an early termination policy based on running averages of the primary metric of all runs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param delay_evaluation: Number of intervals by which to delay the first evaluation.
+    :type delay_evaluation: int
+    :param evaluation_interval: Interval (number of runs) between policy evaluations.
+    :type evaluation_interval: int
+    :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+     values include: "Bandit", "MedianStopping", "TruncationSelection".
+    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+    """
+
+    # policy_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'policy_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+        'policy_type': {'key': 'policyType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # delay_evaluation / evaluation_interval are handled by the base class.
+        super(MedianStoppingPolicy, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype; callers cannot override it.
+        self.policy_type = 'MedianStopping'  # type: str
+
+
+class MlAssistConfiguration(msrest.serialization.Model):
+ """Labeling MLAssist configuration definition.
+
+ :param inferencing_compute_binding: AML compute binding used in inferencing.
+ :type inferencing_compute_binding:
+ ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ :param ml_assist_enabled: Indicates whether MLAssist feature is enabled.
+ :type ml_assist_enabled: bool
+ :param training_compute_binding: AML compute binding used in training.
+ :type training_compute_binding: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ """
+
+ _attribute_map = {
+ 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeConfiguration'},
+ 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'},
+ 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeConfiguration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MlAssistConfiguration, self).__init__(**kwargs)
+ self.inferencing_compute_binding = kwargs.get('inferencing_compute_binding', None)
+ self.ml_assist_enabled = kwargs.get('ml_assist_enabled', None)
+ self.training_compute_binding = kwargs.get('training_compute_binding', None)
+
+
+class ModelContainer(msrest.serialization.Model):
+ """ModelContainer.
+
+ :param description: The asset description text.
+ :type description: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelContainer, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+
+
+class ModelContainerResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.ModelContainer
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # id/name/type/system_data are ARM envelope fields populated by the server;
+    # only properties may (and must) be supplied by the caller.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'ModelContainer'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ModelContainerResource, self).__init__(**kwargs)
+        # Required: subscript access deliberately raises KeyError when omitted.
+        self.properties = kwargs['properties']
+        # Server-populated; filled during deserialization of responses.
+        self.system_data = None
+
+
+class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelContainer entities.
+
+ :param next_link: The link to the next page of ModelContainer objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type ModelContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelContainerResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[ModelContainerResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = kwargs.get('next_link', None)
+ self.value = kwargs.get('value', None)
+
+
+class ModelVersion(msrest.serialization.Model):
+ """Model asset version details.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param datastore_id: ARM resource ID of the datastore where the asset is located.
+ :type datastore_id: str
+ :param description: The asset description text.
+ :type description: str
+ :param flavors: Mapping of model flavors to their properties.
+ :type flavors: dict[str, ~azure_machine_learning_workspaces.models.FlavorData]
+ :param is_anonymous: If the name version are system generated (anonymous registration).
+ :type is_anonymous: bool
+ :param path: Required. The path of the file/directory in the datastore.
+ :type path: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _validation = {
+ 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'flavors': {'key': 'flavors', 'type': '{FlavorData}'},
+ 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
+ 'path': {'key': 'path', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelVersion, self).__init__(**kwargs)
+ self.datastore_id = kwargs.get('datastore_id', None)
+ self.description = kwargs.get('description', None)
+ self.flavors = kwargs.get('flavors', None)
+ self.is_anonymous = kwargs.get('is_anonymous', None)
+ self.path = kwargs['path']
+ self.properties = kwargs.get('properties', None)
+ self.tags = kwargs.get('tags', None)
+
+
+class ModelVersionResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.ModelVersion
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # id/name/type/system_data are ARM envelope fields populated by the server;
+    # only properties may (and must) be supplied by the caller.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'ModelVersion'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ModelVersionResource, self).__init__(**kwargs)
+        # Required: subscript access deliberately raises KeyError when omitted.
+        self.properties = kwargs['properties']
+        # Server-populated; filled during deserialization of responses.
+        self.system_data = None
+
+
+class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelVersion entities.
+
+ :param next_link: The link to the next page of ModelVersion objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type ModelVersion.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelVersionResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[ModelVersionResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = kwargs.get('next_link', None)
+ self.value = kwargs.get('value', None)
+
+
+class Mpi(DistributionConfiguration):
+    """MPI distribution configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework.Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param process_count_per_instance: Number of processes per MPI node.
+    :type process_count_per_instance: int
+    """
+
+    # distribution_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Mpi, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype; callers cannot override it.
+        self.distribution_type = 'Mpi'  # type: str
+        self.process_count_per_instance = kwargs.get('process_count_per_instance', None)
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NoneDatastoreCredentials(DatastoreCredentials):
+    """Empty/none datastore credentials.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param credentials_type: Required. Credential type used to authentication with storage.Constant
+     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+     "ServicePrincipal", "SqlAdmin".
+    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+    :param secrets: Empty/none datastore secret.
+    :type secrets: ~azure_machine_learning_workspaces.models.DatastoreSecrets
+    """
+
+    # credentials_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'credentials_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+        'secrets': {'key': 'secrets', 'type': 'DatastoreSecrets'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(NoneDatastoreCredentials, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype (the string "None", not Python None).
+        self.credentials_type = 'None'  # type: str
+        self.secrets = kwargs.get('secrets', None)
+
+
+class NoneDatastoreSecrets(DatastoreSecrets):
+    """Empty/none datastore secret.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param secrets_type: Required. Credential type used to authentication with storage.Constant
+     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+     "ServicePrincipal", "SqlAdmin".
+    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+    """
+
+    # secrets_type is the polymorphic discriminator and must always be present.
+    _validation = {
+        'secrets_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'secrets_type': {'key': 'secretsType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(NoneDatastoreSecrets, self).__init__(**kwargs)
+        # Discriminator value fixed for this subtype (the string "None", not Python None).
+        self.secrets_type = 'None'  # type: str
+
+
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = kwargs.get('error_message', None)
+ self.status_code = kwargs.get('status_code', None)
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+ :param resource_id: the data plane resourceId that used to initialize notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurs when preparing notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = kwargs.get('fqdn', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None)
+
+
+class Objective(msrest.serialization.Model):
+ """Optimization objective.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param goal: Required. Defines supported metric goals for hyperparameter tuning. Possible
+ values include: "Minimize", "Maximize".
+ :type goal: str or ~azure_machine_learning_workspaces.models.Goal
+ :param primary_metric: Required. Name of the metric to optimize.
+ :type primary_metric: str
+ """
+
+ _validation = {
+ 'goal': {'required': True},
+ 'primary_metric': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'goal': {'key': 'goal', 'type': 'str'},
+ 'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Objective, self).__init__(**kwargs)
+ self.goal = kwargs['goal']
+ self.primary_metric = kwargs['primary_metric']
+
+
+class OnlineDeploymentTrackedResource(TrackedResource):
+    """OnlineDeploymentTrackedResource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param location: Required. The geo-location where the resource lives.
+    :type location: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+     resources of the same type.
+    :type kind: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.OnlineDeployment
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # id/name/type/system_data are ARM envelope fields populated by the server;
+    # location and properties must be supplied by the caller.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'location': {'required': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'OnlineDeployment'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # tags and location are handled by the TrackedResource base class.
+        super(OnlineDeploymentTrackedResource, self).__init__(**kwargs)
+        self.identity = kwargs.get('identity', None)
+        self.kind = kwargs.get('kind', None)
+        # Required: subscript access deliberately raises KeyError when omitted.
+        self.properties = kwargs['properties']
+        # Server-populated; filled during deserialization of responses.
+        self.system_data = None
+
+
class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of an ARM-paginated list of OnlineDeployment entities.

    :param next_link: Link to the next page of OnlineDeployment objects; null when there
     are no further pages.
    :type next_link: str
    :param value: The OnlineDeployment objects contained in this page.
    :type value: list[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
    """

    # Maps Python attribute names to their wire-format JSON keys and types.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[OnlineDeploymentTrackedResource]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class OnlineEndpoint(msrest.serialization.Model):
    """Online endpoint configuration.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param auth_mode: Required. Authentication mode for the inference endpoint. Possible
     values include: "AMLToken", "Key", "AADToken".
    :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthMode
    :param description: Description of the inference endpoint.
    :type description: str
    :param keys: EndpointAuthKeys to set initially on an Endpoint. This property is always
     returned as null; AuthKey values must be retrieved via the ListKeys API.
    :type keys: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
    :param properties: Property dictionary. Properties can be added, but not removed or
     altered.
    :type properties: dict[str, str]
    :ivar provisioning_state: State of endpoint provisioning. Possible values include:
     "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.EndpointProvisioningState
    :ivar scoring_uri: Endpoint URI.
    :vartype scoring_uri: str
    :ivar swagger_uri: Endpoint Swagger URI.
    :vartype swagger_uri: str
    :param target: Optional ARM resource ID of the compute, if one exists.
    :type target: str
    :param traffic: Traffic rules describing how traffic is routed across deployments.
    :type traffic: dict[str, int]
    """

    _validation = {
        'auth_mode': {'required': True},
        'provisioning_state': {'readonly': True},
        'scoring_uri': {'readonly': True},
        'swagger_uri': {'readonly': True},
    }

    _attribute_map = {
        'auth_mode': {'key': 'authMode', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'traffic': {'key': 'traffic', 'type': '{int}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OnlineEndpoint, self).__init__(**kwargs)
        # Required field: a missing value raises KeyError, per the model contract.
        self.auth_mode = kwargs['auth_mode']
        self.description = kwargs.get('description')
        self.keys = kwargs.get('keys')
        self.properties = kwargs.get('properties')
        # Server-populated fields are always sent as None.
        self.provisioning_state = None
        self.scoring_uri = None
        self.swagger_uri = None
        self.target = kwargs.get('target')
        self.traffic = kwargs.get('traffic')
+
+
class OnlineEndpointTrackedResource(TrackedResource):
    """ARM tracked-resource envelope around an OnlineEndpoint.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    :param location: Required. The geo-location where the resource lives.
    :type location: str
    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
     resources of the same type.
    :type kind: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.OnlineEndpoint
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'location': {'key': 'location', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'OnlineEndpoint'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(
        self,
        **kwargs
    ):
        # Base class consumes the shared ARM fields (id, name, type, tags, location).
        super(OnlineEndpointTrackedResource, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        # Required field: KeyError on absence is the generated-model contract.
        self.properties = kwargs['properties']
        # Read-only on the service side.
        self.system_data = None
+
+
class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of an ARM-paginated list of OnlineEndpoint entities.

    :param next_link: Link to the next page of OnlineEndpoint objects; null when there are
     no further pages.
    :type next_link: str
    :param value: The OnlineEndpoint objects contained in this page.
    :type value: list[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
    """

    # Maps Python attribute names to their wire-format JSON keys and types.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[OnlineEndpointTrackedResource]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
+
+
class OnlineRequestSettings(msrest.serialization.Model):
    """Scoring-request configuration for an online deployment.

    :param max_concurrent_requests_per_instance: How many requests may queue at once for
     this deployment.
    :type max_concurrent_requests_per_instance: int
    :param max_queue_wait: Maximum queue wait time, in ISO 8601 format (millisecond
     precision supported).
    :type max_queue_wait: ~datetime.timedelta
    :param request_timeout: Request timeout, in ISO 8601 format (millisecond precision
     supported).
    :type request_timeout: ~datetime.timedelta
    """

    _attribute_map = {
        'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
        'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'},
        'request_timeout': {'key': 'requestTimeout', 'type': 'duration'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OnlineRequestSettings, self).__init__(**kwargs)
        self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance')
        self.max_queue_wait = kwargs.get('max_queue_wait')
        self.request_timeout = kwargs.get('request_timeout')
+
+
class Operation(msrest.serialization.Model):
    """A REST API operation of the Azure Machine Learning workspace provider.

    :param name: Operation name, in the form {provider}/{resource}/{operation}.
    :type name: str
    :param display: Display information for the operation.
    :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Operation, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.display = kwargs.get('display')
+
+
class OperationDisplay(msrest.serialization.Model):
    """Display information for a REST API operation.

    :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
    :type provider: str
    :param resource: The resource the operation acts on.
    :type resource: str
    :param operation: The operation users can perform.
    :type operation: str
    :param description: Human-readable description of the operation.
    :type description: str
    """

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OperationDisplay, self).__init__(**kwargs)
        self.provider = kwargs.get('provider')
        self.resource = kwargs.get('resource')
        self.operation = kwargs.get('operation')
        self.description = kwargs.get('description')
+
+
class OperationListResult(msrest.serialization.Model):
    """The operations supported by the resource provider.

    :param value: AML workspace operations supported by the AML workspace resource
     provider.
    :type value: list[~azure_machine_learning_workspaces.models.Operation]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Operation]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OperationListResult, self).__init__(**kwargs)
        self.value = kwargs.get('value')
+
+
class OutputDataBinding(msrest.serialization.Model):
    """Binding that describes where a job's output data is written.

    :param datastore_id: ARM resource ID of the datastore where the output is stored.
    :type datastore_id: str
    :param mode: Mechanism for moving data to the datastore. Possible values include:
     "Mount", "Download", "Upload".
    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
    :param path_on_compute: Location of the data inside the container process.
    :type path_on_compute: str
    :param path_on_datastore: Path within the datastore to the data.
    :type path_on_datastore: str
    """

    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OutputDataBinding, self).__init__(**kwargs)
        self.datastore_id = kwargs.get('datastore_id')
        self.mode = kwargs.get('mode')
        self.path_on_compute = kwargs.get('path_on_compute')
        self.path_on_datastore = kwargs.get('path_on_datastore')
+
+
class OutputPathAssetReference(AssetReferenceBase):
    """Asset reference expressed as a path inside a job's output.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference. Constant
     filled by server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param job_id: ARM resource ID of the job.
    :type job_id: str
    :param path: Path of the file/directory within the job output.
    :type path: str
    """

    _validation = {
        'reference_type': {'required': True},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'job_id': {'key': 'jobId', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(OutputPathAssetReference, self).__init__(**kwargs)
        # Polymorphic discriminator: this subclass is always the "OutputPath" kind.
        self.reference_type = 'OutputPath'  # type: str
        self.job_id = kwargs.get('job_id')
        self.path = kwargs.get('path')
+
+
class PaginatedComputeResourcesList(msrest.serialization.Model):
    """Paginated list of Machine Learning compute objects wrapped in ARM resource envelopes.

    :param value: The Machine Learning compute objects, each wrapped in an ARM resource
     envelope.
    :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ComputeResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PaginatedComputeResourcesList, self).__init__(**kwargs)
        self.value = kwargs.get('value')
        self.next_link = kwargs.get('next_link')
+
+
class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
    """Paginated list of workspace connection objects.

    :param value: The workspace connection objects.
    :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
        self.value = kwargs.get('value')
        self.next_link = kwargs.get('next_link')
+
+
class PartialOnlineDeployment(msrest.serialization.Model):
    """Mutable online deployment configuration (base of a polymorphic hierarchy).

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: PartialAksOnlineDeployment, PartialManagedOnlineDeployment.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
     deployment.
    :type app_insights_enabled: bool
    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant
     filled by server. Possible values include: "Managed", "K8S", "AzureMLCompute".
    :type endpoint_compute_type: str or
     ~azure_machine_learning_workspaces.models.EndpointComputeType
    :param liveness_probe: Deployment container liveness/readiness probe configuration.
    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
    """

    _validation = {
        'endpoint_compute_type': {'required': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
    }

    # Discriminator values that route deserialization to the concrete subclasses.
    _subtype_map = {
        'endpoint_compute_type': {'K8S': 'PartialAksOnlineDeployment', 'Managed': 'PartialManagedOnlineDeployment'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialOnlineDeployment, self).__init__(**kwargs)
        self.app_insights_enabled = kwargs.get('app_insights_enabled')
        # Set by subclasses; the base class leaves the discriminator unset.
        self.endpoint_compute_type = None  # type: Optional[str]
        self.liveness_probe = kwargs.get('liveness_probe')
        self.request_settings = kwargs.get('request_settings')
        self.scale_settings = kwargs.get('scale_settings')
+
+
class PartialAksOnlineDeployment(PartialOnlineDeployment):
    """Mutable AKS (K8S) online deployment configuration.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
     deployment.
    :type app_insights_enabled: bool
    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant
     filled by server. Possible values include: "Managed", "K8S", "AzureMLCompute".
    :type endpoint_compute_type: str or
     ~azure_machine_learning_workspaces.models.EndpointComputeType
    :param liveness_probe: Deployment container liveness/readiness probe configuration.
    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
    :param container_resource_requirements: Resource requirements for each container
     instance within an online deployment.
    :type container_resource_requirements:
     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
    """

    _validation = {
        'endpoint_compute_type': {'required': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialAksOnlineDeployment, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.endpoint_compute_type = 'K8S'  # type: str
        self.container_resource_requirements = kwargs.get('container_resource_requirements')
+
+
class PartialBatchDeployment(msrest.serialization.Model):
    """Mutable per-deployment batch inference settings.

    :param description: Description of the endpoint deployment.
    :type description: str
    """

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialBatchDeployment, self).__init__(**kwargs)
        self.description = kwargs.get('description')
+
+
class PartialBatchDeploymentPartialTrackedResource(msrest.serialization.Model):
    """Partial tracked-resource envelope; strictly used in update (PATCH) requests.

    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences
     for resources of the same type.
    :type kind: str
    :param location: The geo-location where the resource lives.
    :type location: str
    :param properties: Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.PartialBatchDeployment
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialBatchDeploymentPartialTrackedResource, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        self.location = kwargs.get('location')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class PartialBatchEndpoint(msrest.serialization.Model):
    """Mutable batch endpoint configuration.

    :param traffic: Traffic rules describing how traffic is routed across deployments.
    :type traffic: dict[str, int]
    """

    _attribute_map = {
        'traffic': {'key': 'traffic', 'type': '{int}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialBatchEndpoint, self).__init__(**kwargs)
        self.traffic = kwargs.get('traffic')
+
+
class PartialBatchEndpointPartialTrackedResource(msrest.serialization.Model):
    """Partial tracked-resource envelope; strictly used in update (PATCH) requests.

    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences
     for resources of the same type.
    :type kind: str
    :param location: The geo-location where the resource lives.
    :type location: str
    :param properties: Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.PartialBatchEndpoint
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'PartialBatchEndpoint'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialBatchEndpointPartialTrackedResource, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        self.location = kwargs.get('location')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class PartialManagedOnlineDeployment(PartialOnlineDeployment):
    """Mutable managed online deployment configuration.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
     deployment.
    :type app_insights_enabled: bool
    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant
     filled by server. Possible values include: "Managed", "K8S", "AzureMLCompute".
    :type endpoint_compute_type: str or
     ~azure_machine_learning_workspaces.models.EndpointComputeType
    :param liveness_probe: Deployment container liveness/readiness probe configuration.
    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
    :param readiness_probe: Deployment container liveness/readiness probe configuration.
    :type readiness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
    """

    _validation = {
        'endpoint_compute_type': {'required': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
        'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialManagedOnlineDeployment, self).__init__(**kwargs)
        # Fix the polymorphic discriminator for this subclass.
        self.endpoint_compute_type = 'Managed'  # type: str
        self.readiness_probe = kwargs.get('readiness_probe')
+
+
class PartialOnlineDeploymentPartialTrackedResource(msrest.serialization.Model):
    """Partial tracked-resource envelope; strictly used in update (PATCH) requests.

    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences
     for resources of the same type.
    :type kind: str
    :param location: The geo-location where the resource lives.
    :type location: str
    :param properties: Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineDeployment
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        self.location = kwargs.get('location')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class PartialOnlineEndpoint(msrest.serialization.Model):
    """Mutable online endpoint configuration.

    :param traffic: Traffic rules describing how traffic is routed across deployments.
    :type traffic: dict[str, int]
    """

    _attribute_map = {
        'traffic': {'key': 'traffic', 'type': '{int}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialOnlineEndpoint, self).__init__(**kwargs)
        self.traffic = kwargs.get('traffic')
+
+
class PartialOnlineEndpointPartialTrackedResource(msrest.serialization.Model):
    """Partial tracked-resource envelope; strictly used in update (PATCH) requests.

    :param identity: Service identity associated with a resource.
    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
    :param kind: Metadata used by portal/tooling/etc to render different UX experiences
     for resources of the same type.
    :type kind: str
    :param location: The geo-location where the resource lives.
    :type location: str
    :param properties: Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineEndpoint
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.kind = kwargs.get('kind')
        self.location = kwargs.get('location')
        self.properties = kwargs.get('properties')
        self.tags = kwargs.get('tags')
+
+
class Password(msrest.serialization.Model):
    """A server-provided name/value credential pair.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: Credential name.
    :vartype name: str
    :ivar value: Credential value.
    :vartype value: str
    """

    _validation = {
        'name': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Password, self).__init__(**kwargs)
        # Both fields are read-only and populated only on responses.
        self.name = None
        self.value = None
+
+
class PersonalComputeInstanceSettings(msrest.serialization.Model):
    """Settings for a personal compute instance.

    :param assigned_user: The user explicitly assigned to the personal compute instance.
    :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
    """

    _attribute_map = {
        'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
        self.assigned_user = kwargs.get('assigned_user')
+
+
class PrivateEndpoint(msrest.serialization.Model):
    """The Private Endpoint resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ARM identifier for the Private Endpoint.
    :vartype id: str
    :ivar subnet_arm_id: The ARM identifier for the Subnet resource that the private
     endpoint links to.
    :vartype subnet_arm_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'subnet_arm_id': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PrivateEndpoint, self).__init__(**kwargs)
        # Both fields are read-only and populated only on responses.
        self.id = None
        self.subnet_arm_id = None
+
+
class PrivateEndpointConnection(Resource):
    """The Private Endpoint Connection resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param identity: The identity of the resource.
    :type identity: ~azure_machine_learning_workspaces.models.Identity
    :param location: Specifies the location of the resource.
    :type location: str
    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
    :type tags: dict[str, str]
    :param sku: The sku of the workspace.
    :type sku: ~azure_machine_learning_workspaces.models.Sku
    :ivar system_data: Metadata pertaining to creation and last modification of the
     resource.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    :param private_endpoint: The resource of the private end point.
    :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
    :param private_link_service_connection_state: Information about the state of the
     connection between service consumer and provider.
    :type private_link_service_connection_state:
     ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
    :ivar provisioning_state: The provisioning state of the private endpoint connection
     resource. Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    # Nested wire keys ("properties.*") flatten the ARM properties envelope.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'Identity'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
        'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        # Base Resource consumes the shared ARM fields (id, name, type).
        super(PrivateEndpointConnection, self).__init__(**kwargs)
        self.identity = kwargs.get('identity')
        self.location = kwargs.get('location')
        self.tags = kwargs.get('tags')
        self.sku = kwargs.get('sku')
        # Server-populated fields are always sent as None.
        self.system_data = None
        self.private_endpoint = kwargs.get('private_endpoint')
        self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state')
        self.provisioning_state = None
+
+
+class PrivateEndpointConnectionListResult(msrest.serialization.Model):
+    """List of private endpoint connection associated with the specified workspace.
+
+    :param value: Array of private endpoint connections.
+    :type value: list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+
+
+class PrivateLinkResource(Resource):
+    """A private link resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :ivar group_id: The private link resource group id.
+    :vartype group_id: str
+    :ivar required_members: The private link resource required member names.
+    :vartype required_members: list[str]
+    :param required_zone_names: The private link resource Private link DNS zone name.
+    :type required_zone_names: list[str]
+    """
+
+    # readonly=True entries are rejected by the serializer if the client tries to send them.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'group_id': {'readonly': True},
+        'required_members': {'readonly': True},
+    }
+
+    # 'properties.*' keys are flattened: the attribute lives under the nested
+    # "properties" object in the REST payload.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'group_id': {'key': 'properties.groupId', 'type': 'str'},
+        'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+        'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateLinkResource, self).__init__(**kwargs)
+        self.identity = kwargs.get('identity', None)
+        self.location = kwargs.get('location', None)
+        self.tags = kwargs.get('tags', None)
+        self.sku = kwargs.get('sku', None)
+        # Read-only fields: always None client-side; populated from server responses.
+        self.system_data = None
+        self.group_id = None
+        self.required_members = None
+        self.required_zone_names = kwargs.get('required_zone_names', None)
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+    """A list of private link resources.
+
+    :param value: Array of private link resources.
+    :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+    """A collection of information about the state of the connection between service consumer and provider.
+
+    :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+     of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+     "Timeout".
+    :type status: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+    :param description: The reason for approval/rejection of the connection.
+    :type description: str
+    :param actions_required: A message indicating if changes on the service provider require any
+     updates on the consumer.
+    :type actions_required: str
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+        self.status = kwargs.get('status', None)
+        self.description = kwargs.get('description', None)
+        self.actions_required = kwargs.get('actions_required', None)
+
+
+class ProbeSettings(msrest.serialization.Model):
+    """Deployment container liveness/readiness probe configuration.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param initial_delay: The delay before the first probe in ISO 8601 format.
+    :type initial_delay: ~datetime.timedelta
+    :param period: The length of time between probes in ISO 8601 format.
+    :type period: ~datetime.timedelta
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout: The probe timeout in ISO 8601 format.
+    :type timeout: ~datetime.timedelta
+    """
+
+    # 'duration' fields are timedelta values serialized as ISO 8601 duration strings.
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'initial_delay': {'key': 'initialDelay', 'type': 'duration'},
+        'period': {'key': 'period', 'type': 'duration'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout': {'key': 'timeout', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ProbeSettings, self).__init__(**kwargs)
+        self.failure_threshold = kwargs.get('failure_threshold', None)
+        self.initial_delay = kwargs.get('initial_delay', None)
+        self.period = kwargs.get('period', None)
+        self.success_threshold = kwargs.get('success_threshold', None)
+        self.timeout = kwargs.get('timeout', None)
+
+
+class ProgressMetrics(msrest.serialization.Model):
+    """Progress metrics definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar completed_datapoint_count: The completed datapoint count.
+    :vartype completed_datapoint_count: long
+    :ivar incremental_dataset_last_refresh_time: The time of last successful incremental dataset
+     refresh in UTC.
+    :vartype incremental_dataset_last_refresh_time: ~datetime.datetime
+    :ivar skipped_datapoint_count: The skipped datapoint count.
+    :vartype skipped_datapoint_count: long
+    :ivar total_datapoint_count: The total datapoint count.
+    :vartype total_datapoint_count: long
+    """
+
+    # Every field is server-populated; clients never send this model's contents.
+    _validation = {
+        'completed_datapoint_count': {'readonly': True},
+        'incremental_dataset_last_refresh_time': {'readonly': True},
+        'skipped_datapoint_count': {'readonly': True},
+        'total_datapoint_count': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'},
+        'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'},
+        'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'},
+        'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ProgressMetrics, self).__init__(**kwargs)
+        # Read-only fields: always None client-side; populated from server responses.
+        self.completed_datapoint_count = None
+        self.incremental_dataset_last_refresh_time = None
+        self.skipped_datapoint_count = None
+        self.total_datapoint_count = None
+
+
+class PyTorch(DistributionConfiguration):
+    """PyTorch distribution configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework. Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param process_count: Total process count for the distributed job.
+    :type process_count: int
+    """
+
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'process_count': {'key': 'processCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PyTorch, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed value identifying this subtype on the wire.
+        self.distribution_type = 'PyTorch'  # type: str
+        self.process_count = kwargs.get('process_count', None)
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+    """The properties for Quota update or retrieval.
+
+    :param id: Specifies the resource ID.
+    :type id: str
+    :param type: Specifies the resource type.
+    :type type: str
+    :param limit: The maximum permitted quota of the resource.
+    :type limit: long
+    :param unit: An enum describing the unit of quota measurement. Possible values include:
+     "Count".
+    :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(QuotaBaseProperties, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        self.type = kwargs.get('type', None)
+        self.limit = kwargs.get('limit', None)
+        self.unit = kwargs.get('unit', None)
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+    """Quota update parameters.
+
+    :param value: The list for update quota.
+    :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+    :param location: Region of workspace quota to be updated.
+    :type location: str
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(QuotaUpdateParameters, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.location = kwargs.get('location', None)
+
+
+class Recurrence(msrest.serialization.Model):
+    """The workflow trigger recurrence for ComputeStartStop schedule type.
+
+    :param frequency: The recurrence frequency. Possible values include: "NotSpecified", "Second",
+     "Minute", "Hour", "Day", "Week", "Month", "Year".
+    :type frequency: str or ~azure_machine_learning_workspaces.models.RecurrenceFrequency
+    :param interval: The interval.
+    :type interval: int
+    :param start_time: The start time.
+    :type start_time: str
+    :param time_zone: The time zone.
+    :type time_zone: str
+    :param schedule: The recurrence schedule.
+    :type schedule: ~azure_machine_learning_workspaces.models.RecurrenceSchedule
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'frequency': {'key': 'frequency', 'type': 'str'},
+        'interval': {'key': 'interval', 'type': 'int'},
+        'start_time': {'key': 'startTime', 'type': 'str'},
+        'time_zone': {'key': 'timeZone', 'type': 'str'},
+        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Recurrence, self).__init__(**kwargs)
+        self.frequency = kwargs.get('frequency', None)
+        self.interval = kwargs.get('interval', None)
+        self.start_time = kwargs.get('start_time', None)
+        self.time_zone = kwargs.get('time_zone', None)
+        self.schedule = kwargs.get('schedule', None)
+
+
+class RecurrenceSchedule(msrest.serialization.Model):
+    """The recurrence schedule.
+
+    :param minutes: The minutes.
+    :type minutes: list[int]
+    :param hours: The hours.
+    :type hours: list[int]
+    :param week_days: The days of the week.
+    :type week_days: list[str or ~azure_machine_learning_workspaces.models.DaysOfWeek]
+    """
+
+    # week_days is serialized as plain strings; enum members are stringified on the wire.
+    _attribute_map = {
+        'minutes': {'key': 'minutes', 'type': '[int]'},
+        'hours': {'key': 'hours', 'type': '[int]'},
+        'week_days': {'key': 'weekDays', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(RecurrenceSchedule, self).__init__(**kwargs)
+        self.minutes = kwargs.get('minutes', None)
+        self.hours = kwargs.get('hours', None)
+        self.week_days = kwargs.get('week_days', None)
+
+
+class RegenerateEndpointKeysRequest(msrest.serialization.Model):
+    """RegenerateEndpointKeysRequest.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param key_type: Required. Specification for which type of key to generate. Primary or
+     Secondary. Possible values include: "Primary", "Secondary".
+    :type key_type: str or ~azure_machine_learning_workspaces.models.KeyType
+    :param key_value: The value the key is set to.
+    :type key_value: str
+    """
+
+    _validation = {
+        'key_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'key_type': {'key': 'keyType', 'type': 'str'},
+        'key_value': {'key': 'keyValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(RegenerateEndpointKeysRequest, self).__init__(**kwargs)
+        # Required argument: subscript access raises KeyError if the caller omits it.
+        self.key_type = kwargs['key_type']
+        self.key_value = kwargs.get('key_value', None)
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+    """RegistryListCredentialsResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location:
+    :vartype location: str
+    :ivar username:
+    :vartype username: str
+    :param passwords:
+    :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+    """
+
+    _validation = {
+        'location': {'readonly': True},
+        'username': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'username': {'key': 'username', 'type': 'str'},
+        'passwords': {'key': 'passwords', 'type': '[Password]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(RegistryListCredentialsResult, self).__init__(**kwargs)
+        # Read-only fields: always None client-side; populated from server responses.
+        self.location = None
+        self.username = None
+        self.passwords = kwargs.get('passwords', None)
+
+
+class ResourceId(msrest.serialization.Model):
+    """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Required. The ID of the resource.
+    :type id: str
+    """
+
+    _validation = {
+        'id': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceId, self).__init__(**kwargs)
+        # Required argument: subscript access raises KeyError if the caller omits it.
+        self.id = kwargs['id']
+
+
+class ResourceIdentity(msrest.serialization.Model):
+    """Service identity associated with a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar principal_id: Client ID that is used when authenticating.
+    :vartype principal_id: str
+    :ivar tenant_id: AAD Tenant where this identity lives.
+    :vartype tenant_id: str
+    :param type: Defines values for a ResourceIdentity's type. Possible values include:
+     "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned", "None".
+    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityAssignment
+    :param user_assigned_identities: Dictionary of the user assigned identities, key is ARM
+     resource ID of the UAI.
+    :type user_assigned_identities: dict[str,
+     ~azure_machine_learning_workspaces.models.UserAssignedIdentityMeta]
+    """
+
+    _validation = {
+        'principal_id': {'readonly': True},
+        'tenant_id': {'readonly': True},
+    }
+
+    # '{UserAssignedIdentityMeta}' denotes a dict of string keys to that model type.
+    _attribute_map = {
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceIdentity, self).__init__(**kwargs)
+        # Read-only fields: always None client-side; populated from server responses.
+        self.principal_id = None
+        self.tenant_id = None
+        self.type = kwargs.get('type', None)
+        self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
+
+
+class ResourceName(msrest.serialization.Model):
+    """The Resource Name.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    # Every field is server-populated; clients never send this model's contents.
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceName, self).__init__(**kwargs)
+        self.value = None
+        self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+    """The quota assigned to a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar aml_workspace_location: Region of the AML workspace in the id.
+    :vartype aml_workspace_location: str
+    :ivar type: Specifies the resource type.
+    :vartype type: str
+    :ivar name: Name of the resource.
+    :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+    :ivar limit: The maximum permitted quota of the resource.
+    :vartype limit: long
+    :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+    :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    """
+
+    # Every field is server-populated; clients never send this model's contents.
+    _validation = {
+        'id': {'readonly': True},
+        'aml_workspace_location': {'readonly': True},
+        'type': {'readonly': True},
+        'name': {'readonly': True},
+        'limit': {'readonly': True},
+        'unit': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'ResourceName'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceQuota, self).__init__(**kwargs)
+        self.id = None
+        self.aml_workspace_location = None
+        self.type = None
+        self.name = None
+        self.limit = None
+        self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+    """ResourceSkuLocationInfo.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location: Location of the SKU.
+    :vartype location: str
+    :ivar zones: List of availability zones where the SKU is supported.
+    :vartype zones: list[str]
+    :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+    :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+    """
+
+    # Every field is server-populated; clients never send this model's contents.
+    _validation = {
+        'location': {'readonly': True},
+        'zones': {'readonly': True},
+        'zone_details': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'zones': {'key': 'zones', 'type': '[str]'},
+        'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+        self.location = None
+        self.zones = None
+        self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+    """Describes the zonal capabilities of a SKU.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+    :vartype name: list[str]
+    :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+     of zones.
+    :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+    """
+
+    # Every field is server-populated; clients never send this model's contents.
+    _validation = {
+        'name': {'readonly': True},
+        'capabilities': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': '[str]'},
+        'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+        self.name = None
+        self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+    """The restriction because of which SKU cannot be used.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location. This would
+     be different locations where the SKU is restricted.
+    :vartype values: list[str]
+    :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+     "NotAvailableForRegion", "NotAvailableForSubscription".
+    :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+    """
+
+    _validation = {
+        'type': {'readonly': True},
+        'values': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'values': {'key': 'values', 'type': '[str]'},
+        'reason_code': {'key': 'reasonCode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Restriction, self).__init__(**kwargs)
+        # Read-only fields: always None client-side; populated from server responses.
+        self.type = None
+        self.values = None
+        self.reason_code = kwargs.get('reason_code', None)
+
+
+class Route(msrest.serialization.Model):
+    """Route.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param path: Required. The path for the route.
+    :type path: str
+    :param port: Required. The port for the route.
+    :type port: int
+    """
+
+    # 'pattern' is a regex constraint enforced by msrest during serialization.
+    _validation = {
+        'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+        'port': {'required': True},
+    }
+
+    _attribute_map = {
+        'path': {'key': 'path', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Route, self).__init__(**kwargs)
+        # Required arguments: subscript access raises KeyError if the caller omits them.
+        self.path = kwargs['path']
+        self.port = kwargs['port']
+
+
+class SasDatastoreCredentials(DatastoreCredentials):
+    """SAS datastore credentials configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param credentials_type: Required. Credential type used for authentication with storage.
+     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
+     "Sas", "ServicePrincipal", "SqlAdmin".
+    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+    :param secrets: Storage container secrets.
+    :type secrets: ~azure_machine_learning_workspaces.models.SasDatastoreSecrets
+    """
+
+    _validation = {
+        'credentials_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+        'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(SasDatastoreCredentials, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed value identifying this subtype on the wire.
+        self.credentials_type = 'Sas'  # type: str
+        self.secrets = kwargs.get('secrets', None)
+
+
+class SasDatastoreSecrets(DatastoreSecrets):
+    """Datastore SAS secrets.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param secrets_type: Required. Credential type used for authentication with storage.
+     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
+     "Sas", "ServicePrincipal", "SqlAdmin".
+    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+    :param sas_token: Storage container SAS token.
+    :type sas_token: str
+    """
+
+    _validation = {
+        'secrets_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'secrets_type': {'key': 'secretsType', 'type': 'str'},
+        'sas_token': {'key': 'sasToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(SasDatastoreSecrets, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed value identifying this subtype on the wire.
+        self.secrets_type = 'Sas'  # type: str
+        self.sas_token = kwargs.get('sas_token', None)
+
+
+class ScaleSettings(msrest.serialization.Model):
+    """Scale settings for AML Compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param max_node_count: Required. Max number of nodes to use.
+    :type max_node_count: int
+    :param min_node_count: Min number of nodes to use.
+    :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This
+     string needs to be in the RFC Format.
+    :type node_idle_time_before_scale_down: ~datetime.timedelta
+    """
+
+    _validation = {
+        'max_node_count': {'required': True},
+    }
+
+    # NOTE(review): nodeIdleTimeBeforeScaleDown is serialized as msrest 'duration'
+    # (ISO 8601 duration string), despite the docstring's mention of "RFC Format".
+    _attribute_map = {
+        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+        'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ScaleSettings, self).__init__(**kwargs)
+        self.max_node_count = kwargs['max_node_count']
+        # min_node_count defaults to 0 (not None) when omitted.
+        self.min_node_count = kwargs.get('min_node_count', 0)
+        self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
+class ScriptReference(msrest.serialization.Model):
+    """Script reference.
+
+    :param script_source: The storage source of the script: inline, workspace.
+    :type script_source: str
+    :param script_data: The location of scripts in the mounted volume.
+    :type script_data: str
+    :param script_arguments: Optional command line arguments passed to the script to run.
+    :type script_arguments: str
+    :param timeout: Optional time period passed to timeout command.
+    :type timeout: str
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'script_source': {'key': 'scriptSource', 'type': 'str'},
+        'script_data': {'key': 'scriptData', 'type': 'str'},
+        'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+        'timeout': {'key': 'timeout', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ScriptReference, self).__init__(**kwargs)
+        self.script_source = kwargs.get('script_source', None)
+        self.script_data = kwargs.get('script_data', None)
+        self.script_arguments = kwargs.get('script_arguments', None)
+        self.timeout = kwargs.get('timeout', None)
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+    """Customized setup scripts.
+
+    :param startup_script: Script that's run every time the machine starts.
+    :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    :param creation_script: Script that's run only once during provision of the compute.
+    :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+        'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ScriptsToExecute, self).__init__(**kwargs)
+        self.startup_script = kwargs.get('startup_script', None)
+        self.creation_script = kwargs.get('creation_script', None)
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+    """ServiceManagedResourcesSettings.
+
+    :param cosmos_db: The settings for the service managed cosmosdb account.
+    :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+        self.cosmos_db = kwargs.get('cosmos_db', None)
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+    """Service principal credentials.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param client_id: Required. Client Id.
+    :type client_id: str
+    :param client_secret: Required. Client secret.
+    :type client_secret: str
+    """
+
+    _validation = {
+        'client_id': {'required': True},
+        'client_secret': {'required': True},
+    }
+
+    _attribute_map = {
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'client_secret': {'key': 'clientSecret', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ServicePrincipalCredentials, self).__init__(**kwargs)
+        # Required arguments: subscript access raises KeyError if the caller omits them.
+        self.client_id = kwargs['client_id']
+        self.client_secret = kwargs['client_secret']
+
+
+class ServicePrincipalDatastoreCredentials(DatastoreCredentials):
+    """Service Principal datastore credentials configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param credentials_type: Required. Credential type used for authentication with storage.
+     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
+     "Sas", "ServicePrincipal", "SqlAdmin".
+    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+    :param authority_url: Authority URL used for authentication.
+    :type authority_url: str
+    :param client_id: Required. Service principal client ID.
+    :type client_id: str
+    :param resource_uri: Resource the service principal has access to.
+    :type resource_uri: str
+    :param secrets: Service principal secrets.
+    :type secrets: ~azure_machine_learning_workspaces.models.ServicePrincipalDatastoreSecrets
+    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+    :type tenant_id: str
+    """
+
+    _validation = {
+        'credentials_type': {'required': True},
+        'client_id': {'required': True},
+        'tenant_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+        'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed value identifying this subtype on the wire.
+        self.credentials_type = 'ServicePrincipal'  # type: str
+        self.authority_url = kwargs.get('authority_url', None)
+        # Required arguments: subscript access raises KeyError if the caller omits them.
+        self.client_id = kwargs['client_id']
+        self.resource_uri = kwargs.get('resource_uri', None)
+        self.secrets = kwargs.get('secrets', None)
+        self.tenant_id = kwargs['tenant_id']
+
+
+class ServicePrincipalDatastoreSecrets(DatastoreSecrets):
+    """Datastore Service Principal secrets.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param secrets_type: Required. Credential type used for authentication with storage.
+     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
+     "Sas", "ServicePrincipal", "SqlAdmin".
+    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+    :param client_secret: Service principal secret.
+    :type client_secret: str
+    """
+
+    _validation = {
+        'secrets_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'secrets_type': {'key': 'secretsType', 'type': 'str'},
+        'client_secret': {'key': 'clientSecret', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed value identifying this subtype on the wire.
+        self.secrets_type = 'ServicePrincipal'  # type: str
+        self.client_secret = kwargs.get('client_secret', None)
+
+
+class SetupScripts(msrest.serialization.Model):
+    """Details of customized scripts to execute for setting up the cluster.
+
+    :param scripts: Customized setup scripts.
+    :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+    """
+
+    # Maps Python attribute names to REST payload keys/types for msrest (de)serialization.
+    _attribute_map = {
+        'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(SetupScripts, self).__init__(**kwargs)
+        self.scripts = kwargs.get('scripts', None)
+
+
class SharedPrivateLinkResource(msrest.serialization.Model):
    """A shared private link resource attached to a workspace.

    :param name: Unique name of the private link.
    :type name: str
    :param private_link_resource_id: The resource id that private link links to.
    :type private_link_resource_id: str
    :param group_id: The private link resource group id.
    :type group_id: str
    :param request_message: Request message.
    :type request_message: str
    :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
     of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
     "Timeout".
    :type status: str or
     ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
    """

    # Keys prefixed with "properties." are flattened into the nested
    # "properties" object on the wire.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
        'group_id': {'key': 'properties.groupId', 'type': 'str'},
        'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SharedPrivateLinkResource, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.private_link_resource_id = kwargs.get('private_link_resource_id')
        self.group_id = kwargs.get('group_id')
        self.request_message = kwargs.get('request_message')
        self.status = kwargs.get('status')
+
+
class Sku(msrest.serialization.Model):
    """SKU of an Azure resource.

    :param name: Name of the sku.
    :type name: str
    :param tier: Tier of the sku like Basic or Enterprise.
    :type tier: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Sku, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.tier = kwargs.get('tier')
+
+
class SkuCapability(msrest.serialization.Model):
    """A feature or user capability associated with a SKU.

    :param name: Capability/Feature ID.
    :type name: str
    :param value: Details about the feature/capability.
    :type value: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SkuCapability, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.value = kwargs.get('value')
+
+
class SkuListResult(msrest.serialization.Model):
    """One page of workspace SKUs, with a continuation link.

    :param value:
    :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
    :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
     URI to fetch the next page of Workspace Skus.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[WorkspaceSku]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SkuListResult, self).__init__(**kwargs)
        self.value = kwargs.get('value')
        self.next_link = kwargs.get('next_link')
+
+
class SqlAdminDatastoreCredentials(DatastoreCredentials):
    """Datastore credentials for a SQL administrator account.

    All required parameters must be populated in order to send to Azure.

    :param credentials_type: Required. Credential type used to authentication with storage.Constant
     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
     "ServicePrincipal", "SqlAdmin".
    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
    :param secrets: SQL database secrets.
    :type secrets: ~azure_machine_learning_workspaces.models.SqlAdminDatastoreSecrets
    :param user_id: Required. SQL database user name.
    :type user_id: str
    """

    _validation = {
        'credentials_type': {'required': True},
        'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
        'secrets': {'key': 'secrets', 'type': 'SqlAdminDatastoreSecrets'},
        'user_id': {'key': 'userId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SqlAdminDatastoreCredentials, self).__init__(**kwargs)
        # Discriminator value is fixed for this polymorphic subtype.
        self.credentials_type = 'SqlAdmin'  # type: str
        # Required key: raises KeyError when missing.
        self.user_id = kwargs['user_id']
        self.secrets = kwargs.get('secrets')
+
+
class SqlAdminDatastoreSecrets(DatastoreSecrets):
    """Secrets for a SQL administrator datastore credential.

    All required parameters must be populated in order to send to Azure.

    :param secrets_type: Required. Credential type used to authentication with storage.Constant
     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
     "ServicePrincipal", "SqlAdmin".
    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
    :param password: SQL database password.
    :type password: str
    """

    _validation = {
        'secrets_type': {'required': True},
    }

    _attribute_map = {
        'secrets_type': {'key': 'secretsType', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SqlAdminDatastoreSecrets, self).__init__(**kwargs)
        # Discriminator value is fixed for this polymorphic subtype.
        self.secrets_type = 'SqlAdmin'  # type: str
        self.password = kwargs.get('password')
+
+
class SslConfiguration(msrest.serialization.Model):
    """SSL configuration used by scoring endpoints.

    :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
     "Enabled", "Auto".
    :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
    :param cert: Cert data.
    :type cert: str
    :param key: Key data.
    :type key: str
    :param cname: CNAME of the cert.
    :type cname: str
    :param leaf_domain_label: Leaf domain label of public endpoint.
    :type leaf_domain_label: str
    :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
    :type overwrite_existing_domain: bool
    """

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'cert': {'key': 'cert', 'type': 'str'},
        'key': {'key': 'key', 'type': 'str'},
        'cname': {'key': 'cname', 'type': 'str'},
        'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
        'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(SslConfiguration, self).__init__(**kwargs)
        self.status = kwargs.get('status')
        self.cert = kwargs.get('cert')
        self.key = kwargs.get('key')
        self.cname = kwargs.get('cname')
        self.leaf_domain_label = kwargs.get('leaf_domain_label')
        self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain')
+
+
class StatusMessage(msrest.serialization.Model):
    """An active message associated with a project.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: Service-defined message code.
    :vartype code: str
    :ivar created_time_utc: Time in UTC at which the message was created.
    :vartype created_time_utc: ~datetime.datetime
    :ivar level: Severity level of message. Possible values include: "Error", "Information",
     "Warning".
    :vartype level: str or ~azure_machine_learning_workspaces.models.StatusMessageLevel
    :ivar message: A human-readable representation of the message code.
    :vartype message: str
    """

    _validation = {
        'code': {'readonly': True},
        'created_time_utc': {'readonly': True},
        'level': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
        'level': {'key': 'level', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(StatusMessage, self).__init__(**kwargs)
        # All attributes are server-populated and read-only.
        self.code = None
        self.created_time_utc = None
        self.level = None
        self.message = None
+
+
class SweepJob(JobBase):
    """Definition of a hyperparameter sweep job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param description: The asset description text.
    :type description: str
    :ivar interaction_endpoints: List of JobEndpoints.
     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
    :vartype interaction_endpoints: dict[str,
     ~azure_machine_learning_workspaces.models.JobEndpoint]
    :param job_type: Required. Specifies the type of job.Constant filled by server.  Possible
     values include: "Command", "Sweep", "Labeling".
    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
     "Succeeded", "Failed", "Canceled", "InProgress".
    :vartype provisioning_state: str or
     ~azure_machine_learning_workspaces.models.JobProvisioningState
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param algorithm: Required. Type of the hyperparameter sampling algorithms. Possible values
     include: "Grid", "Random", "Bayesian".
    :type algorithm: str or ~azure_machine_learning_workspaces.models.SamplingAlgorithm
    :param compute: Required. Compute binding for the job.
    :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
    :param early_termination: Early termination policies enable canceling poor-performing runs
     before they complete.
    :type early_termination: ~azure_machine_learning_workspaces.models.EarlyTerminationPolicy
    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
     placed in the "Default" experiment.
    :type experiment_name: str
    :param identity: Identity configuration. If set, this should be one of AmlToken,
     ManagedIdentity or null.
     Defaults to AmlToken if null.
    :type identity: ~azure_machine_learning_workspaces.models.IdentityConfiguration
    :param max_concurrent_trials: An upper bound on the number of trials performed in parallel.
    :type max_concurrent_trials: int
    :param max_total_trials: An upper bound on the number of trials to perform.
    :type max_total_trials: int
    :param objective: Required. Optimization objective.
    :type objective: ~azure_machine_learning_workspaces.models.Objective
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
     Private preview feature and only available to users on the allow list.
    :type priority: int
    :param search_space: Required. A dictionary containing each parameter and its distribution. The
     dictionary key is the name of the parameter.
    :type search_space: dict[str, object]
    :ivar status: The status of a job. Possible values include: "NotStarted", "Starting",
     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
    :param timeout: The total timeout in ISO 8601 format. Only supports duration with precision as
     low as Minutes.
    :type timeout: ~datetime.timedelta
    :param trial: Trial component definition.
    :type trial: ~azure_machine_learning_workspaces.models.TrialComponent
    """

    _validation = {
        'interaction_endpoints': {'readonly': True},
        'job_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'algorithm': {'required': True},
        'compute': {'required': True},
        'objective': {'required': True},
        'output': {'readonly': True},
        'search_space': {'required': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'algorithm': {'key': 'algorithm', 'type': 'str'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
        'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
        'objective': {'key': 'objective', 'type': 'Objective'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
        'search_space': {'key': 'searchSpace', 'type': '{object}'},
        'status': {'key': 'status', 'type': 'str'},
        'timeout': {'key': 'timeout', 'type': 'duration'},
        'trial': {'key': 'trial', 'type': 'TrialComponent'},
    }

    def __init__(self, **kwargs):
        super(SweepJob, self).__init__(**kwargs)
        # Discriminator value is fixed for this polymorphic subtype.
        self.job_type = 'Sweep'  # type: str
        # Required keys: each raises KeyError when missing.
        self.algorithm = kwargs['algorithm']
        self.compute = kwargs['compute']
        self.objective = kwargs['objective']
        self.search_space = kwargs['search_space']
        # Server-populated, read-only fields.
        self.output = None
        self.status = None
        # Optional configuration.
        self.early_termination = kwargs.get('early_termination')
        self.experiment_name = kwargs.get('experiment_name')
        self.identity = kwargs.get('identity')
        self.max_concurrent_trials = kwargs.get('max_concurrent_trials')
        self.max_total_trials = kwargs.get('max_total_trials')
        self.priority = kwargs.get('priority')
        self.timeout = kwargs.get('timeout')
        self.trial = kwargs.get('trial')
+
+
class SynapseSparkPoolProperties(msrest.serialization.Model):
    """Wrapper for properties specific to Synapse Spark pools.

    :param properties: AKS properties.
    :type properties:
     ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
    """

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
    }

    def __init__(self, **kwargs):
        super(SynapseSparkPoolProperties, self).__init__(**kwargs)
        self.properties = kwargs.get('properties')
+
+
class SynapseSpark(Compute, SynapseSparkPoolProperties):
    """A SynapseSpark compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param properties: Synapse Spark pool properties.
    :type properties:
     ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
    :param compute_type: Required. The type of compute.Constant filled by server.  Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SynapseSpark, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        # Discriminator for the polymorphic Compute hierarchy. The original
        # generated code assigned this twice; a single assignment suffices.
        self.compute_type = 'SynapseSpark'  # type: str
        self.compute_location = kwargs.get('compute_location', None)
        self.provisioning_state = None
        self.description = kwargs.get('description', None)
        self.created_on = None
        self.modified_on = None
        self.resource_id = kwargs.get('resource_id', None)
        self.provisioning_errors = None
        self.is_attached_compute = None
        self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
+
class SynapseSparkPoolPropertiesautogenerated(msrest.serialization.Model):
    """Properties describing a Synapse Spark pool.

    :param auto_scale_properties: Auto scale properties.
    :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
    :param auto_pause_properties: Auto pause properties.
    :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
    :param spark_version: Spark version.
    :type spark_version: str
    :param node_count: The number of compute nodes currently assigned to the compute.
    :type node_count: int
    :param node_size: Node size.
    :type node_size: str
    :param node_size_family: Node size family.
    :type node_size_family: str
    :param subscription_id: Azure subscription identifier.
    :type subscription_id: str
    :param resource_group: Name of the resource group in which workspace is located.
    :type resource_group: str
    :param workspace_name: Name of Azure Machine Learning workspace.
    :type workspace_name: str
    :param pool_name: Pool name.
    :type pool_name: str
    """

    _attribute_map = {
        'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
        'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'node_size': {'key': 'nodeSize', 'type': 'str'},
        'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'workspace_name': {'key': 'workspaceName', 'type': 'str'},
        'pool_name': {'key': 'poolName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SynapseSparkPoolPropertiesautogenerated, self).__init__(**kwargs)
        self.auto_scale_properties = kwargs.get('auto_scale_properties')
        self.auto_pause_properties = kwargs.get('auto_pause_properties')
        self.spark_version = kwargs.get('spark_version')
        self.node_count = kwargs.get('node_count')
        self.node_size = kwargs.get('node_size')
        self.node_size_family = kwargs.get('node_size_family')
        self.subscription_id = kwargs.get('subscription_id')
        self.resource_group = kwargs.get('resource_group')
        self.workspace_name = kwargs.get('workspace_name')
        self.pool_name = kwargs.get('pool_name')
+
+
class SystemData(msrest.serialization.Model):
    """Creation and last-modification metadata for a resource.

    :param created_by: The identity that created the resource.
    :type created_by: str
    :param created_by_type: The type of identity that created the resource. Possible values
     include: "User", "Application", "ManagedIdentity", "Key".
    :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
    :param created_at: The timestamp of resource creation (UTC).
    :type created_at: ~datetime.datetime
    :param last_modified_by: The identity that last modified the resource.
    :type last_modified_by: str
    :param last_modified_by_type: The type of identity that last modified the resource. Possible
     values include: "User", "Application", "ManagedIdentity", "Key".
    :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
    :param last_modified_at: The timestamp of resource last modification (UTC).
    :type last_modified_at: ~datetime.datetime
    """

    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'created_by_type': {'key': 'createdByType', 'type': 'str'},
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(SystemData, self).__init__(**kwargs)
        self.created_by = kwargs.get('created_by')
        self.created_by_type = kwargs.get('created_by_type')
        self.created_at = kwargs.get('created_at')
        self.last_modified_by = kwargs.get('last_modified_by')
        self.last_modified_by_type = kwargs.get('last_modified_by_type')
        self.last_modified_at = kwargs.get('last_modified_at')
+
+
class SystemService(msrest.serialization.Model):
    """A system service that runs on a compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar system_service_type: The type of this system service.
    :vartype system_service_type: str
    :ivar public_ip_address: Public IP address.
    :vartype public_ip_address: str
    :ivar version: The version for this type.
    :vartype version: str
    """

    _validation = {
        'system_service_type': {'readonly': True},
        'public_ip_address': {'readonly': True},
        'version': {'readonly': True},
    }

    _attribute_map = {
        'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SystemService, self).__init__(**kwargs)
        # All attributes are server-populated and read-only.
        self.system_service_type = None
        self.public_ip_address = None
        self.version = None
+
+
class TensorFlow(DistributionConfiguration):
    """Distribution configuration for TensorFlow jobs.

    All required parameters must be populated in order to send to Azure.

    :param distribution_type: Required. Specifies the type of distribution framework.Constant
     filled by server.  Possible values include: "PyTorch", "TensorFlow", "Mpi".
    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
    :param parameter_server_count: Number of parameter server tasks.
    :type parameter_server_count: int
    :param worker_count: Number of workers. Overwrites the node count in compute binding.
    :type worker_count: int
    """

    _validation = {
        'distribution_type': {'required': True},
    }

    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
        'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
        'worker_count': {'key': 'workerCount', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(TensorFlow, self).__init__(**kwargs)
        # Discriminator value is fixed for this polymorphic subtype.
        self.distribution_type = 'TensorFlow'  # type: str
        self.parameter_server_count = kwargs.get('parameter_server_count')
        self.worker_count = kwargs.get('worker_count')
+
+
class TrialComponent(msrest.serialization.Model):
    """Definition of a single trial component.

    All required parameters must be populated in order to send to Azure.

    :param code_id: ARM resource ID of the code asset.
    :type code_id: str
    :param command: Required. The command to execute on startup of the job. eg. "python train.py".
    :type command: str
    :param distribution: Distribution configuration of the job. If set, this should be one of Mpi,
     Tensorflow, PyTorch, or null.
    :type distribution: ~azure_machine_learning_workspaces.models.DistributionConfiguration
    :param environment_id: The ARM resource ID of the Environment specification for the job.
    :type environment_id: str
    :param environment_variables: Environment variables included in the job.
    :type environment_variables: dict[str, str]
    :param input_data_bindings: Mapping of input data bindings used in the job.
    :type input_data_bindings: dict[str,
     ~azure_machine_learning_workspaces.models.InputDataBinding]
    :param output_data_bindings: Mapping of output data bindings used in the job.
    :type output_data_bindings: dict[str,
     ~azure_machine_learning_workspaces.models.OutputDataBinding]
    :param timeout: The max run duration in ISO 8601 format, after which the trial component will
     be cancelled.
     Only supports duration with precision as low as Seconds.
    :type timeout: ~datetime.timedelta
    """

    _validation = {
        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'code_id': {'key': 'codeId', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
        'timeout': {'key': 'timeout', 'type': 'duration'},
    }

    def __init__(self, **kwargs):
        super(TrialComponent, self).__init__(**kwargs)
        # Required key: raises KeyError when missing.
        self.command = kwargs['command']
        self.code_id = kwargs.get('code_id')
        self.distribution = kwargs.get('distribution')
        self.environment_id = kwargs.get('environment_id')
        self.environment_variables = kwargs.get('environment_variables')
        self.input_data_bindings = kwargs.get('input_data_bindings')
        self.output_data_bindings = kwargs.get('output_data_bindings')
        self.timeout = kwargs.get('timeout')
+
+
class TruncationSelectionPolicy(EarlyTerminationPolicy):
    """Early termination policy that cancels a percentage of runs at each evaluation interval.

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Name of policy configuration.Constant filled by server.  Possible
     values include: "Bandit", "MedianStopping", "TruncationSelection".
    :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
    :param truncation_percentage: The percentage of runs to cancel at each evaluation interval.
    :type truncation_percentage: int
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
        'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(TruncationSelectionPolicy, self).__init__(**kwargs)
        # Discriminator value is fixed for this polymorphic subtype.
        self.policy_type = 'TruncationSelection'  # type: str
        self.truncation_percentage = kwargs.get('truncation_percentage')
+
+
class UpdateWorkspaceQuotas(msrest.serialization.Model):
    """Response properties for a workspace quota update.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Specifies the resource ID.
    :vartype id: str
    :ivar type: Specifies the resource type.
    :vartype type: str
    :param limit: The maximum permitted quota of the resource.
    :type limit: long
    :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
    :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
    :param status: Status of update workspace quota. Possible values include: "Undefined",
     "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
     "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
     "OperationNotEnabledForRegion".
    :type status: str or ~azure_machine_learning_workspaces.models.Status
    """

    _validation = {
        'id': {'readonly': True},
        'type': {'readonly': True},
        'unit': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'limit': {'key': 'limit', 'type': 'long'},
        'unit': {'key': 'unit', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
        # Server-populated, read-only fields.
        self.id = None
        self.type = None
        self.unit = None
        # Caller-settable fields.
        self.limit = kwargs.get('limit')
        self.status = kwargs.get('status')
+
+
class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
    """Paged result of a workspace quota update.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The list of workspace quota update result.
    :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
    :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
     ListNext() with this to fetch the next page of Workspace Quota update result.
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
        # All attributes are server-populated and read-only.
        self.value = None
        self.next_link = None
+
+
class Usage(msrest.serialization.Model):
    """AML resource usage information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Specifies the resource ID.
    :vartype id: str
    :ivar aml_workspace_location: Region of the AML workspace in the id.
    :vartype aml_workspace_location: str
    :ivar type: Specifies the resource type.
    :vartype type: str
    :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
    :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
    :ivar current_value: The current usage of the resource.
    :vartype current_value: long
    :ivar limit: The maximum permitted usage of the resource.
    :vartype limit: long
    :ivar name: The name of the type of usage.
    :vartype name: ~azure_machine_learning_workspaces.models.UsageName
    """

    _validation = {
        'id': {'readonly': True},
        'aml_workspace_location': {'readonly': True},
        'type': {'readonly': True},
        'unit': {'readonly': True},
        'current_value': {'readonly': True},
        'limit': {'readonly': True},
        'name': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'current_value': {'key': 'currentValue', 'type': 'long'},
        'limit': {'key': 'limit', 'type': 'long'},
        'name': {'key': 'name', 'type': 'UsageName'},
    }

    def __init__(self, **kwargs):
        super(Usage, self).__init__(**kwargs)
        # All attributes are server-populated and read-only.
        self.id = None
        self.aml_workspace_location = None
        self.type = None
        self.unit = None
        self.current_value = None
        self.limit = None
        self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+    """Settings for the user account that gets created on each of the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = kwargs['admin_user_name']
+ self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+ self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId(aka appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class UserAssignedIdentityMeta(msrest.serialization.Model):
+ """User assigned identities associated with a resource.
+
+    :param client_id: Also known as application ID; a unique identifier generated by Azure AD that is tied to
+ an application and service principal during its initial provisioning.
+ :type client_id: str
+ :param principal_id: The object ID of the service principal object for your managed identity
+ that is used to grant role-based access to an Azure resource.
+ :type principal_id: str
+ """
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentityMeta, self).__init__(**kwargs)
+ self.client_id = kwargs.get('client_id', None)
+ self.principal_id = kwargs.get('principal_id', None)
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+ self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on Azure Virtual Machines.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+    :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None)
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param value: The list of virtual machine sizes supported by AmlCompute.
+ :type value: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.public_key_data = kwargs.get('public_key_data', None)
+ self.private_key_data = kwargs.get('private_key_data', None)
+
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+    :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ workspace RP in customer subscription if the workspace is CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.workspace_id = None
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.key_vault = kwargs.get('key_vault', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.discovery_url = kwargs.get('discovery_url', None)
+ self.provisioning_state = None
+ self.encryption = kwargs.get('encryption', None)
+ self.hbi_workspace = kwargs.get('hbi_workspace', False)
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False)
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None)
+ self.notebook_info = None
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
+ self.tenant_id = None
+
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+    :param value_format: Format of the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+ self.value_format = kwargs.get('value_format', None)
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations that the SKU is available. This will be supported and
+ registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = kwargs.get('restrictions', None)
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.identity = kwargs.get('identity', None)
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
new file mode 100644
index 00000000000..f0db7fefaa9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
@@ -0,0 +1,11055 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
class DatastoreCredentials(msrest.serialization.Model):
    """Base definition for datastore credentials.

    This is a polymorphic base type; you probably want one of the concrete
    sub-classes instead: AccountKeyDatastoreCredentials,
    CertificateDatastoreCredentials, NoneDatastoreCredentials,
    SasDatastoreCredentials, ServicePrincipalDatastoreCredentials,
    SqlAdminDatastoreCredentials.

    All required parameters must be populated in order to send to Azure.

    :param credentials_type: Required. Credential type used to authenticate with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
    """

    _validation = {
        'credentials_type': {'required': True},
    }

    _attribute_map = {
        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
    }

    _subtype_map = {
        'credentials_type': {
            'AccountKey': 'AccountKeyDatastoreCredentials',
            'Certificate': 'CertificateDatastoreCredentials',
            'None': 'NoneDatastoreCredentials',
            'Sas': 'SasDatastoreCredentials',
            'ServicePrincipal': 'ServicePrincipalDatastoreCredentials',
            'SqlAdmin': 'SqlAdminDatastoreCredentials',
        }
    }

    def __init__(self, **kwargs):
        super(DatastoreCredentials, self).__init__(**kwargs)
        # Discriminator: each concrete sub-class overwrites this with its constant.
        self.credentials_type = None  # type: Optional[str]
+
+
class AccountKeyDatastoreCredentials(DatastoreCredentials):
    """Account key datastore credentials configuration.

    All required parameters must be populated in order to send to Azure.

    :param credentials_type: Required. Credential type used to authenticate with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
    :param secrets: Storage account secrets.
    :type secrets: ~azure_machine_learning_workspaces.models.AccountKeyDatastoreSecrets
    """

    _validation = {
        'credentials_type': {'required': True},
    }

    _attribute_map = {
        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
        'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'},
    }

    def __init__(self, *, secrets: Optional["AccountKeyDatastoreSecrets"] = None, **kwargs):
        super(AccountKeyDatastoreCredentials, self).__init__(**kwargs)
        # Fixed discriminator value for this sub-class.
        self.credentials_type = 'AccountKey'  # type: str
        self.secrets = secrets
+
+
class DatastoreSecrets(msrest.serialization.Model):
    """Base definition for datastore secrets.

    This is a polymorphic base type; you probably want one of the concrete
    sub-classes instead: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets,
    NoneDatastoreSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets,
    SqlAdminDatastoreSecrets.

    All required parameters must be populated in order to send to Azure.

    :param secrets_type: Required. Credential type used to authenticate with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
    """

    _validation = {
        'secrets_type': {'required': True},
    }

    _attribute_map = {
        'secrets_type': {'key': 'secretsType', 'type': 'str'},
    }

    _subtype_map = {
        'secrets_type': {
            'AccountKey': 'AccountKeyDatastoreSecrets',
            'Certificate': 'CertificateDatastoreSecrets',
            'None': 'NoneDatastoreSecrets',
            'Sas': 'SasDatastoreSecrets',
            'ServicePrincipal': 'ServicePrincipalDatastoreSecrets',
            'SqlAdmin': 'SqlAdminDatastoreSecrets',
        }
    }

    def __init__(self, **kwargs):
        super(DatastoreSecrets, self).__init__(**kwargs)
        # Discriminator: each concrete sub-class overwrites this with its constant.
        self.secrets_type = None  # type: Optional[str]
+
+
class AccountKeyDatastoreSecrets(DatastoreSecrets):
    """Datastore account key secrets.

    All required parameters must be populated in order to send to Azure.

    :param secrets_type: Required. Credential type used to authenticate with storage.
     Constant filled by server. Possible values include: "AccountKey", "Certificate", "None",
     "Sas", "ServicePrincipal", "SqlAdmin".
    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
    :param key: Storage account key.
    :type key: str
    """

    _validation = {
        'secrets_type': {'required': True},
    }

    _attribute_map = {
        'secrets_type': {'key': 'secretsType', 'type': 'str'},
        'key': {'key': 'key', 'type': 'str'},
    }

    def __init__(self, *, key: Optional[str] = None, **kwargs):
        super(AccountKeyDatastoreSecrets, self).__init__(**kwargs)
        # Fixed discriminator value for this sub-class.
        self.secrets_type = 'AccountKey'  # type: str
        self.key = key
+
+
class Compute(msrest.serialization.Model):
    """Machine Learning compute object.

    This is a polymorphic base type; you probably want one of the concrete
    sub-classes instead: Aks, AmlCompute, ComputeInstance, DataFactory,
    DataLakeAnalytics, Databricks, HdInsight, SynapseSpark, VirtualMachine.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown",
     "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
    }

    _subtype_map = {
        'compute_type': {
            'AKS': 'Aks',
            'AmlCompute': 'AmlCompute',
            'ComputeInstance': 'ComputeInstance',
            'DataFactory': 'DataFactory',
            'DataLakeAnalytics': 'DataLakeAnalytics',
            'Databricks': 'Databricks',
            'HDInsight': 'HdInsight',
            'SynapseSpark': 'SynapseSpark',
            'VirtualMachine': 'VirtualMachine',
        }
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        disable_local_auth: Optional[bool] = None,
        **kwargs
    ):
        super(Compute, self).__init__(**kwargs)
        # Discriminator: concrete sub-classes overwrite this with their constant.
        self.compute_type = None  # type: Optional[str]
        # Server-populated (read-only) attributes start out as None.
        self.provisioning_state = None
        self.created_on = None
        self.modified_on = None
        self.provisioning_errors = None
        self.is_attached_compute = None
        # Caller-supplied settings.
        self.compute_location = compute_location
        self.description = description
        self.resource_id = resource_id
        self.disable_local_auth = disable_local_auth
+
+
class Aks(Compute):
    """A Machine Learning compute based on AKS.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown",
     "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties: AKS properties.
    :type properties: ~azure_machine_learning_workspaces.models.AksProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'AksProperties'},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        disable_local_auth: Optional[bool] = None,
        properties: Optional["AksProperties"] = None,
        **kwargs
    ):
        # Shared compute settings are handled by the base class.
        super(Aks, self).__init__(
            compute_location=compute_location,
            description=description,
            resource_id=resource_id,
            disable_local_auth=disable_local_auth,
            **kwargs
        )
        # Fixed discriminator value for this sub-class.
        self.compute_type = 'AKS'  # type: str
        self.properties = properties
+
+
class ComputeSecrets(msrest.serialization.Model):
    """Secrets related to a Machine Learning compute. Might differ for every type of compute.

    This is a polymorphic base type; you probably want one of the concrete
    sub-classes instead: AksComputeSecrets, DatabricksComputeSecrets,
    VirtualMachineSecrets.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    """

    _validation = {
        'compute_type': {'required': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    _subtype_map = {
        'compute_type': {
            'AKS': 'AksComputeSecrets',
            'Databricks': 'DatabricksComputeSecrets',
            'VirtualMachine': 'VirtualMachineSecrets',
        }
    }

    def __init__(self, **kwargs):
        super(ComputeSecrets, self).__init__(**kwargs)
        # Discriminator: concrete sub-classes overwrite this with their constant.
        self.compute_type = None  # type: Optional[str]
+
+
class AksComputeSecrets(ComputeSecrets):
    """Secrets related to a Machine Learning compute based on AKS.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param user_kube_config: Content of kubeconfig file that can be used to connect to the
     Kubernetes cluster.
    :type user_kube_config: str
    :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
     Kubernetes cluster.
    :type admin_kube_config: str
    :param image_pull_secret_name: Image registry pull secret.
    :type image_pull_secret_name: str
    """

    _validation = {
        'compute_type': {'required': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
        'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
        'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        user_kube_config: Optional[str] = None,
        admin_kube_config: Optional[str] = None,
        image_pull_secret_name: Optional[str] = None,
        **kwargs
    ):
        super(AksComputeSecrets, self).__init__(**kwargs)
        # Fixed discriminator value for this sub-class.
        self.compute_type = 'AKS'  # type: str
        self.user_kube_config = user_kube_config
        self.admin_kube_config = admin_kube_config
        self.image_pull_secret_name = image_pull_secret_name
+
+
class AksNetworkingConfiguration(msrest.serialization.Model):
    """Advance configuration for AKS networking.

    :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
    :type subnet_id: str
    :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It
     must not overlap with any Subnet IP ranges.
    :type service_cidr: str
    :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be
     within the Kubernetes service address range specified in serviceCidr.
    :type dns_service_ip: str
    :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
     must not overlap with any Subnet IP ranges or the Kubernetes service address range.
    :type docker_bridge_cidr: str
    """

    # Server-side patterns: CIDR blocks for service/docker-bridge ranges, a
    # plain IPv4 address for the DNS service IP.
    _validation = {
        'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
        'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
        'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
    }

    _attribute_map = {
        'subnet_id': {'key': 'subnetId', 'type': 'str'},
        'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
        'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
        'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        subnet_id: Optional[str] = None,
        service_cidr: Optional[str] = None,
        dns_service_ip: Optional[str] = None,
        docker_bridge_cidr: Optional[str] = None,
        **kwargs
    ):
        super(AksNetworkingConfiguration, self).__init__(**kwargs)
        self.subnet_id = subnet_id
        self.service_cidr = service_cidr
        self.dns_service_ip = dns_service_ip
        self.docker_bridge_cidr = docker_bridge_cidr
+
+
class AksProperties(msrest.serialization.Model):
    """AKS properties.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param cluster_fqdn: Cluster full qualified domain name.
    :type cluster_fqdn: str
    :ivar system_services: System services.
    :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
    :param agent_count: Number of agents.
    :type agent_count: int
    :param agent_vm_size: Agent virtual machine size.
    :type agent_vm_size: str
    :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
     "DenseProd", "DevTest". Default value: "FastProd".
    :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
    :param ssl_configuration: SSL configuration.
    :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
    :param aks_networking_configuration: AKS networking configuration for vnet.
    :type aks_networking_configuration:
     ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
    :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
     "InternalLoadBalancer". Default value: "PublicIp".
    :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
    :param load_balancer_subnet: Load Balancer Subnet.
    :type load_balancer_subnet: str
    """

    _validation = {
        'system_services': {'readonly': True},
        'agent_count': {'minimum': 0},
    }

    _attribute_map = {
        'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
        'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
        'agent_count': {'key': 'agentCount', 'type': 'int'},
        'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
        'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
        'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
        'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
        'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
        'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        cluster_fqdn: Optional[str] = None,
        agent_count: Optional[int] = None,
        agent_vm_size: Optional[str] = None,
        cluster_purpose: Optional[Union[str, "ClusterPurpose"]] = "FastProd",
        ssl_configuration: Optional["SslConfiguration"] = None,
        aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None,
        load_balancer_type: Optional[Union[str, "LoadBalancerType"]] = "PublicIp",
        load_balancer_subnet: Optional[str] = None,
        **kwargs
    ):
        super(AksProperties, self).__init__(**kwargs)
        # Read-only attribute, populated by the server.
        self.system_services = None
        # Caller-supplied settings.
        self.cluster_fqdn = cluster_fqdn
        self.agent_count = agent_count
        self.agent_vm_size = agent_vm_size
        self.cluster_purpose = cluster_purpose
        self.ssl_configuration = ssl_configuration
        self.aks_networking_configuration = aks_networking_configuration
        self.load_balancer_type = load_balancer_type
        self.load_balancer_subnet = load_balancer_subnet
+
+
class AmlCompute(Compute):
    """An Azure Machine Learning compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown",
     "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties: AML Compute properties.
    :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        disable_local_auth: Optional[bool] = None,
        properties: Optional["AmlComputeProperties"] = None,
        **kwargs
    ):
        # Shared compute settings are handled by the base class.
        super(AmlCompute, self).__init__(
            compute_location=compute_location,
            description=description,
            resource_id=resource_id,
            disable_local_auth=disable_local_auth,
            **kwargs
        )
        # Fixed discriminator value for this sub-class.
        self.compute_type = 'AmlCompute'  # type: str
        self.properties = properties
+
+
class AmlComputeNodeInformation(msrest.serialization.Model):
    """Compute node information related to a AmlCompute.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar node_id: ID of the compute node.
    :vartype node_id: str
    :ivar private_ip_address: Private IP address of the compute node.
    :vartype private_ip_address: str
    :ivar public_ip_address: Public IP address of the compute node.
    :vartype public_ip_address: str
    :ivar port: SSH port number of the node.
    :vartype port: int
    :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
     leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
     "leaving", "preempted".
    :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
    :ivar run_id: ID of the Experiment running on the node, if any else null.
    :vartype run_id: str
    """

    # Every attribute of this model is read-only / server-populated.
    _validation = {
        'node_id': {'readonly': True},
        'private_ip_address': {'readonly': True},
        'public_ip_address': {'readonly': True},
        'port': {'readonly': True},
        'node_state': {'readonly': True},
        'run_id': {'readonly': True},
    }

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'node_state': {'key': 'nodeState', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AmlComputeNodeInformation, self).__init__(**kwargs)
        # All fields are filled in during deserialization of a server response.
        self.node_id = None
        self.private_ip_address = None
        self.public_ip_address = None
        self.port = None
        self.node_state = None
        self.run_id = None
+
+
class ComputeNodesInformation(msrest.serialization.Model):
    """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.

    This is a polymorphic base type; you probably want the concrete sub-class
    AmlComputeNodesInformation instead.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :ivar next_link: The continuation token.
    :vartype next_link: str
    """

    _validation = {
        'compute_type': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    _subtype_map = {
        'compute_type': {
            'AmlCompute': 'AmlComputeNodesInformation',
        }
    }

    def __init__(self, **kwargs):
        super(ComputeNodesInformation, self).__init__(**kwargs)
        # Discriminator: concrete sub-classes overwrite this with their constant.
        self.compute_type = None  # type: Optional[str]
        # Read-only paging token, populated by the server.
        self.next_link = None
+
+
class AmlComputeNodesInformation(ComputeNodesInformation):
    """Compute node information related to a AmlCompute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server. Possible
     values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
     "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :ivar next_link: The continuation token.
    :vartype next_link: str
    :ivar nodes: The collection of returned AmlCompute nodes details.
    :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
    """

    _validation = {
        'compute_type': {'required': True},
        'next_link': {'readonly': True},
        'nodes': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
    }

    def __init__(self, **kwargs):
        super(AmlComputeNodesInformation, self).__init__(**kwargs)
        # Fixed discriminator value for this sub-class.
        self.compute_type = 'AmlCompute'  # type: str
        # Read-only node list, populated by the server.
        self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Network is isolated or not.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+ else is open all public nodes. It can be default only during cluster creation time, after
+ creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning.
+ Possible values are: true - Indicates that the compute nodes will have public IPs
+ provisioned. false - Indicates that the compute nodes will have a private endpoint and no
+ public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ os_type: Optional[Union[str, "OsType"]] = "Linux",
+ vm_size: Optional[str] = None,
+ vm_priority: Optional[Union[str, "VmPriority"]] = None,
+ virtual_machine_image: Optional["VirtualMachineImage"] = None,
+ isolated_network: Optional[bool] = None,
+ scale_settings: Optional["ScaleSettings"] = None,
+ user_account_credentials: Optional["UserAccountCredentials"] = None,
+ subnet: Optional["ResourceId"] = None,
+ remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified",
+ enable_node_public_ip: Optional[bool] = True,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = os_type
+ self.vm_size = vm_size
+ self.vm_priority = vm_priority
+ self.virtual_machine_image = virtual_machine_image
+ self.isolated_network = isolated_network
+ self.scale_settings = scale_settings
+ self.user_account_credentials = user_account_credentials
+ self.subnet = subnet
+ self.remote_login_port_public_access = remote_login_port_public_access
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = enable_node_public_ip
+
+
+class IdentityConfiguration(msrest.serialization.Model):
+ """Base definition for identity configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlToken, ManagedIdentity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+ server. Possible values include: "Managed", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IdentityConfiguration, self).__init__(**kwargs)
+ self.identity_type = None # type: Optional[str]
+
+
+class AmlToken(IdentityConfiguration):
+ """AML Token identity configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework.Constant filled by
+ server. Possible values include: "Managed", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+ """
+
+ _validation = {
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlToken, self).__init__(**kwargs)
+ self.identity_type = 'AMLToken' # type: str
+
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ display_name: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = id
+ self.display_name = display_name
+ self.description = description
+
+
+class AssetReferenceBase(msrest.serialization.Model):
+ """Base definition for asset references.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+ server. Possible values include: "Id", "DataPath", "OutputPath".
+ :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+ """
+
+ _validation = {
+ 'reference_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssetReferenceBase, self).__init__(**kwargs)
+ self.reference_type = None # type: Optional[str]
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ object_id: str,
+ tenant_id: str,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = object_id
+ self.tenant_id = tenant_id
+
+
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+ :param delay_in_minutes:
+ :type delay_in_minutes: int
+ :param enabled:
+ :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_in_minutes: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = delay_in_minutes
+ self.enabled = enabled
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+ :param min_node_count:
+ :type min_node_count: int
+ :param enabled:
+ :type enabled: bool
+ :param max_node_count:
+ :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ min_node_count: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ max_node_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = min_node_count
+ self.enabled = enabled
+ self.max_node_count = max_node_count
+
+
+class OnlineScaleSettings(msrest.serialization.Model):
+ """Online deployment scaling configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AutoScaleSettings, ManualScaleSettings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_instances: Maximum number of instances for this deployment.
+ :type max_instances: int
+ :param min_instances: Minimum number of instances for this deployment.
+ :type min_instances: int
+ :param scale_type: Required. Type of deployment scaling algorithm.Constant filled by server.
+ Possible values include: "Auto", "Manual".
+ :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+ """
+
+ _validation = {
+ 'scale_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_instances': {'key': 'maxInstances', 'type': 'int'},
+ 'min_instances': {'key': 'minInstances', 'type': 'int'},
+ 'scale_type': {'key': 'scaleType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'scale_type': {'Auto': 'AutoScaleSettings', 'Manual': 'ManualScaleSettings'}
+ }
+
+ def __init__(
+ self,
+ *,
+ max_instances: Optional[int] = None,
+ min_instances: Optional[int] = None,
+ **kwargs
+ ):
+ super(OnlineScaleSettings, self).__init__(**kwargs)
+ self.max_instances = max_instances
+ self.min_instances = min_instances
+ self.scale_type = None # type: Optional[str]
+
+
+class AutoScaleSettings(OnlineScaleSettings):
+ """AutoScaleSettings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_instances: Maximum number of instances for this deployment.
+ :type max_instances: int
+ :param min_instances: Minimum number of instances for this deployment.
+ :type min_instances: int
+ :param scale_type: Required. Type of deployment scaling algorithm.Constant filled by server.
+ Possible values include: "Auto", "Manual".
+ :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+ :param polling_interval: The polling interval in ISO 8601 format. Only supports duration with
+ precision as low as Seconds.
+ :type polling_interval: ~datetime.timedelta
+ :param target_utilization_percentage: Target CPU usage for the autoscaler.
+ :type target_utilization_percentage: int
+ """
+
+ _validation = {
+ 'scale_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_instances': {'key': 'maxInstances', 'type': 'int'},
+ 'min_instances': {'key': 'minInstances', 'type': 'int'},
+ 'scale_type': {'key': 'scaleType', 'type': 'str'},
+ 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'},
+ 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_instances: Optional[int] = None,
+ min_instances: Optional[int] = None,
+ polling_interval: Optional[datetime.timedelta] = None,
+ target_utilization_percentage: Optional[int] = None,
+ **kwargs
+ ):
+ super(AutoScaleSettings, self).__init__(max_instances=max_instances, min_instances=min_instances, **kwargs)
+ self.scale_type = 'Auto' # type: str
+ self.polling_interval = polling_interval
+ self.target_utilization_percentage = target_utilization_percentage
+
+
+class DatastoreContents(msrest.serialization.Model):
+ """Base definition for datastore contents configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AzureBlobContents, AzureDataLakeGen1Contents, AzureDataLakeGen2Contents, AzureFileContents, AzurePostgreSqlContents, AzureSqlDatabaseContents, GlusterFsContents.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'contents_type': {'AzureBlob': 'AzureBlobContents', 'AzureDataLakeGen1': 'AzureDataLakeGen1Contents', 'AzureDataLakeGen2': 'AzureDataLakeGen2Contents', 'AzureFile': 'AzureFileContents', 'AzurePostgreSql': 'AzurePostgreSqlContents', 'AzureSqlDatabase': 'AzureSqlDatabaseContents', 'GlusterFs': 'GlusterFsContents'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatastoreContents, self).__init__(**kwargs)
+ self.contents_type = None # type: Optional[str]
+
+
+class AzureBlobContents(DatastoreContents):
+ """Azure Blob datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ account_name: str,
+ container_name: str,
+ credentials: "DatastoreCredentials",
+ endpoint: str,
+ protocol: str,
+ **kwargs
+ ):
+ super(AzureBlobContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureBlob' # type: str
+ self.account_name = account_name
+ self.container_name = container_name
+ self.credentials = credentials
+ self.endpoint = endpoint
+ self.protocol = protocol
+
+
+class AzureDataLakeGen1Contents(DatastoreContents):
+ """Azure Data Lake Gen1 datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param store_name: Required. Azure Data Lake store name.
+ :type store_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'store_name': {'key': 'storeName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ credentials: "DatastoreCredentials",
+ store_name: str,
+ **kwargs
+ ):
+ super(AzureDataLakeGen1Contents, self).__init__(**kwargs)
+ self.contents_type = 'AzureDataLakeGen1' # type: str
+ self.credentials = credentials
+ self.store_name = store_name
+
+
+class AzureDataLakeGen2Contents(DatastoreContents):
+ """Azure Data Lake Gen2 datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ account_name: str,
+ container_name: str,
+ credentials: "DatastoreCredentials",
+ endpoint: str,
+ protocol: str,
+ **kwargs
+ ):
+ super(AzureDataLakeGen2Contents, self).__init__(**kwargs)
+ self.contents_type = 'AzureDataLakeGen2' # type: str
+ self.account_name = account_name
+ self.container_name = container_name
+ self.credentials = credentials
+ self.endpoint = endpoint
+ self.protocol = protocol
+
+
+class AzureFileContents(DatastoreContents):
+ """Azure File datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param account_name: Required. Storage account name.
+ :type account_name: str
+ :param container_name: Required. Storage account container name.
+ :type container_name: str
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param endpoint: Required. Azure cloud endpoint for the storage account.
+ :type endpoint: str
+ :param protocol: Required. Protocol used to communicate with the storage account.
+ :type protocol: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'credentials': {'required': True},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'protocol': {'key': 'protocol', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ account_name: str,
+ container_name: str,
+ credentials: "DatastoreCredentials",
+ endpoint: str,
+ protocol: str,
+ **kwargs
+ ):
+ super(AzureFileContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureFile' # type: str
+ self.account_name = account_name
+ self.container_name = container_name
+ self.credentials = credentials
+ self.endpoint = endpoint
+ self.protocol = protocol
+
+
+class AzurePostgreSqlContents(DatastoreContents):
+ """Azure Postgre SQL datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param database_name: Required. Azure PostgreSQL database name.
+ :type database_name: str
+ :param enable_ssl: Whether the Azure PostgreSQL server requires SSL.
+ :type enable_ssl: bool
+ :param endpoint: Required. Azure cloud endpoint for the database.
+ :type endpoint: str
+ :param port_number: Required. Azure PostgreSQL server port.
+ :type port_number: int
+ :param server_name: Required. Azure PostgreSQL server name.
+ :type server_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port_number': {'required': True},
+ 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'database_name': {'key': 'databaseName', 'type': 'str'},
+ 'enable_ssl': {'key': 'enableSSL', 'type': 'bool'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'port_number': {'key': 'portNumber', 'type': 'int'},
+ 'server_name': {'key': 'serverName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ credentials: "DatastoreCredentials",
+ database_name: str,
+ endpoint: str,
+ port_number: int,
+ server_name: str,
+ enable_ssl: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AzurePostgreSqlContents, self).__init__(**kwargs)
+ self.contents_type = 'AzurePostgreSql' # type: str
+ self.credentials = credentials
+ self.database_name = database_name
+ self.enable_ssl = enable_ssl
+ self.endpoint = endpoint
+ self.port_number = port_number
+ self.server_name = server_name
+
+
+class AzureSqlDatabaseContents(DatastoreContents):
+ """Azure SQL Database datastore configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param contents_type: Required. Storage type backing the datastore.Constant filled by server.
+ Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile",
+ "AzureMySql", "AzurePostgreSql", "AzureSqlDatabase", "GlusterFs".
+ :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
+ :param credentials: Required. Account credentials.
+ :type credentials: ~azure_machine_learning_workspaces.models.DatastoreCredentials
+ :param database_name: Required. Azure SQL database name.
+ :type database_name: str
+ :param endpoint: Required. Azure cloud endpoint for the database.
+ :type endpoint: str
+ :param port_number: Required. Azure SQL server port.
+ :type port_number: int
+ :param server_name: Required. Azure SQL server name.
+ :type server_name: str
+ """
+
+ _validation = {
+ 'contents_type': {'required': True},
+ 'credentials': {'required': True},
+ 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port_number': {'required': True},
+ 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'contents_type': {'key': 'contentsType', 'type': 'str'},
+ 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
+ 'database_name': {'key': 'databaseName', 'type': 'str'},
+ 'endpoint': {'key': 'endpoint', 'type': 'str'},
+ 'port_number': {'key': 'portNumber', 'type': 'int'},
+ 'server_name': {'key': 'serverName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ credentials: "DatastoreCredentials",
+ database_name: str,
+ endpoint: str,
+ port_number: int,
+ server_name: str,
+ **kwargs
+ ):
+ super(AzureSqlDatabaseContents, self).__init__(**kwargs)
+ self.contents_type = 'AzureSqlDatabase' # type: str
+ self.credentials = credentials
+ self.database_name = database_name
+ self.endpoint = endpoint
+ self.port_number = port_number
+ self.server_name = server_name
+
+
+class EarlyTerminationPolicy(msrest.serialization.Model):
+ """Early termination policies enable canceling poor-performing runs before they complete.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param delay_evaluation: Number of intervals by which to delay the first evaluation.
+ :type delay_evaluation: int
+ :param evaluation_interval: Interval (number of runs) between policy evaluations.
+ :type evaluation_interval: int
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'}
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_evaluation: Optional[int] = None,
+ evaluation_interval: Optional[int] = None,
+ **kwargs
+ ):
+ super(EarlyTerminationPolicy, self).__init__(**kwargs)
+ self.delay_evaluation = delay_evaluation
+ self.evaluation_interval = evaluation_interval
+ self.policy_type = None # type: Optional[str]
+
+
+class BanditPolicy(EarlyTerminationPolicy):
+ """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param delay_evaluation: Number of intervals by which to delay the first evaluation.
+ :type delay_evaluation: int
+ :param evaluation_interval: Interval (number of runs) between policy evaluations.
+ :type evaluation_interval: int
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param slack_amount: Absolute distance allowed from the best performing run.
+ :type slack_amount: float
+ :param slack_factor: Ratio of the allowed distance from the best performing run.
+ :type slack_factor: float
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'slack_amount': {'key': 'slackAmount', 'type': 'float'},
+ 'slack_factor': {'key': 'slackFactor', 'type': 'float'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_evaluation: Optional[int] = None,
+ evaluation_interval: Optional[int] = None,
+ slack_amount: Optional[float] = None,
+ slack_factor: Optional[float] = None,
+ **kwargs
+ ):
+ super(BanditPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs)
+ self.policy_type = 'Bandit' # type: str
+ self.slack_amount = slack_amount
+ self.slack_factor = slack_factor
+
+
+class BatchDeployment(msrest.serialization.Model):
+ """Batch inference settings per deployment.
+
+ :param code_configuration: Code configuration for the endpoint deployment.
+ :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+ :param compute: Configuration for compute binding.
+ :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ :param description: Description of the endpoint deployment.
+ :type description: str
+ :param environment_id: ARM resource ID of the environment specification for the endpoint
+ deployment.
+ :type environment_id: str
+ :param environment_variables: Environment variables configuration for the deployment.
+ :type environment_variables: dict[str, str]
+ :param error_threshold: Error threshold, if the error count for the entire input goes above
+ this value,
+ the batch inference will be aborted. Range is [-1, int.MaxValue].
+ For FileDataset, this value is the count of file failures.
+ For TabularDataset, this value is the count of record failures.
+ If set to -1 (the lower bound), all failures during batch inference will be ignored.
+ :type error_threshold: int
+ :param logging_level: Logging level for batch inference operation. Possible values include:
+ "Info", "Warning", "Debug".
+ :type logging_level: str or ~azure_machine_learning_workspaces.models.BatchLoggingLevel
+ :param mini_batch_size: Size of the mini-batch passed to each batch invocation.
+ For FileDataset, this is the number of files per mini-batch.
+ For TabularDataset, this is the size of the records in bytes, per mini-batch.
+ :type mini_batch_size: long
+ :param model: Reference to the model asset for the endpoint deployment.
+ :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+ :param output_configuration: Output configuration for the batch inference operation.
+ :type output_configuration: ~azure_machine_learning_workspaces.models.BatchOutputConfiguration
+ :param partition_keys: Partition keys list used for Named partitioning.
+ :type partition_keys: list[str]
+ :param properties: Property dictionary. Properties can be added, but not removed or altered.
+ :type properties: dict[str, str]
+ :param retry_settings: Retry Settings for the batch inference operation.
+ :type retry_settings: ~azure_machine_learning_workspaces.models.BatchRetrySettings
+ """
+
+ _attribute_map = {
+ 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+ 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'environment_id': {'key': 'environmentId', 'type': 'str'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'error_threshold': {'key': 'errorThreshold', 'type': 'int'},
+ 'logging_level': {'key': 'loggingLevel', 'type': 'str'},
+ 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'},
+ 'model': {'key': 'model', 'type': 'AssetReferenceBase'},
+ 'output_configuration': {'key': 'outputConfiguration', 'type': 'BatchOutputConfiguration'},
+ 'partition_keys': {'key': 'partitionKeys', 'type': '[str]'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code_configuration: Optional["CodeConfiguration"] = None,
+ compute: Optional["ComputeConfiguration"] = None,
+ description: Optional[str] = None,
+ environment_id: Optional[str] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ error_threshold: Optional[int] = None,
+ logging_level: Optional[Union[str, "BatchLoggingLevel"]] = None,
+ mini_batch_size: Optional[int] = None,
+ model: Optional["AssetReferenceBase"] = None,
+ output_configuration: Optional["BatchOutputConfiguration"] = None,
+ partition_keys: Optional[List[str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ retry_settings: Optional["BatchRetrySettings"] = None,
+ **kwargs
+ ):
+ super(BatchDeployment, self).__init__(**kwargs)
+ self.code_configuration = code_configuration
+ self.compute = compute
+ self.description = description
+ self.environment_id = environment_id
+ self.environment_variables = environment_variables
+ self.error_threshold = error_threshold
+ self.logging_level = logging_level
+ self.mini_batch_size = mini_batch_size
+ self.model = model
+ self.output_configuration = output_configuration
+ self.partition_keys = partition_keys
+ self.properties = properties
+ self.retry_settings = retry_settings
+
+
+class Resource(msrest.serialization.Model):
+    """Common fields that are returned in the response for all Azure Resource Manager resources.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    """
+
+    # All three fields are read-only: populated from the server response, never serialized.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Resource, self).__init__(**kwargs)
+        # Read-only fields start as None and are filled in on deserialization.
+        self.id = None
+        self.name = None
+        self.type = None
+
+
+class TrackedResource(Resource):
+    """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param location: Required. The geo-location where the resource lives.
+    :type location: str
+    """
+
+    # Inherited id/name/type stay read-only; 'location' is the one client-required field.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'location': {'required': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        location: str,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(TrackedResource, self).__init__(**kwargs)
+        self.tags = tags
+        self.location = location
+
+
+class BatchDeploymentTrackedResource(TrackedResource):
+    """Azure Resource Manager tracked-resource envelope wrapping a BatchDeployment payload.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param location: Required. The geo-location where the resource lives.
+    :type location: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+     resources of the same type.
+    :type kind: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.BatchDeployment
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'properties' (the BatchDeployment body) must be supplied by the caller;
+    # system_data and the base ARM identifiers are server-populated.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'location': {'required': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'BatchDeployment'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        location: str,
+        properties: "BatchDeployment",
+        tags: Optional[Dict[str, str]] = None,
+        identity: Optional["ResourceIdentity"] = None,
+        kind: Optional[str] = None,
+        **kwargs
+    ):
+        super(BatchDeploymentTrackedResource, self).__init__(tags=tags, location=location, **kwargs)
+        self.identity = identity
+        self.kind = kind
+        self.properties = properties
+        self.system_data = None  # read-only, filled in on deserialization
+
+
+class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of BatchDeployment entities.
+
+    :param next_link: The link to the next page of BatchDeployment objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type BatchDeployment.
+    :type value: list[~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource]
+    """
+
+    # Standard ARM paging envelope: one page of results plus a continuation link.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[BatchDeploymentTrackedResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        next_link: Optional[str] = None,
+        value: Optional[List["BatchDeploymentTrackedResource"]] = None,
+        **kwargs
+    ):
+        super(BatchDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = next_link
+        self.value = value
+
+
+class BatchEndpoint(msrest.serialization.Model):
+    """Batch endpoint configuration.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param auth_mode: Enum to determine endpoint authentication mode. Possible values include:
+     "AMLToken", "Key", "AADToken".
+    :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthMode
+    :param description: Description of the inference endpoint.
+    :type description: str
+    :param keys: EndpointAuthKeys to set initially on an Endpoint.
+     This property will always be returned as null. AuthKey values must be retrieved using the
+     ListKeys API.
+    :type keys: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :ivar scoring_uri: Endpoint URI.
+    :vartype scoring_uri: str
+    :ivar swagger_uri: Endpoint Swagger URI.
+    :vartype swagger_uri: str
+    :param traffic: Traffic rules on how the traffic will be routed across deployments.
+    :type traffic: dict[str, int]
+    """
+
+    # The two endpoint URIs are assigned by the service and never sent by the client.
+    _validation = {
+        'scoring_uri': {'readonly': True},
+        'swagger_uri': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'auth_mode': {'key': 'authMode', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+        'traffic': {'key': 'traffic', 'type': '{int}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        auth_mode: Optional[Union[str, "EndpointAuthMode"]] = None,
+        description: Optional[str] = None,
+        keys: Optional["EndpointAuthKeys"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        traffic: Optional[Dict[str, int]] = None,
+        **kwargs
+    ):
+        super(BatchEndpoint, self).__init__(**kwargs)
+        self.auth_mode = auth_mode
+        self.description = description
+        self.keys = keys
+        self.properties = properties
+        self.scoring_uri = None  # read-only, filled in on deserialization
+        self.swagger_uri = None  # read-only, filled in on deserialization
+        self.traffic = traffic
+
+
+class BatchEndpointTrackedResource(TrackedResource):
+    """Azure Resource Manager tracked-resource envelope wrapping a BatchEndpoint payload.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param location: Required. The geo-location where the resource lives.
+    :type location: str
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+     resources of the same type.
+    :type kind: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.BatchEndpoint
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'properties' (the BatchEndpoint body) must be supplied by the caller;
+    # system_data and the base ARM identifiers are server-populated.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'location': {'required': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'BatchEndpoint'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        location: str,
+        properties: "BatchEndpoint",
+        tags: Optional[Dict[str, str]] = None,
+        identity: Optional["ResourceIdentity"] = None,
+        kind: Optional[str] = None,
+        **kwargs
+    ):
+        super(BatchEndpointTrackedResource, self).__init__(tags=tags, location=location, **kwargs)
+        self.identity = identity
+        self.kind = kind
+        self.properties = properties
+        self.system_data = None  # read-only, filled in on deserialization
+
+
+class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of BatchEndpoint entities.
+
+    :param next_link: The link to the next page of BatchEndpoint objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type BatchEndpoint.
+    :type value: list[~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource]
+    """
+
+    # Standard ARM paging envelope: one page of results plus a continuation link.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[BatchEndpointTrackedResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        next_link: Optional[str] = None,
+        value: Optional[List["BatchEndpointTrackedResource"]] = None,
+        **kwargs
+    ):
+        super(BatchEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = next_link
+        self.value = value
+
+
+class BatchOutputConfiguration(msrest.serialization.Model):
+    """Batch inference output configuration.
+
+    :param append_row_file_name: Customized output file name for append_row output action.
+     Only meaningful when ``output_action`` is "AppendRow".
+    :type append_row_file_name: str
+    :param output_action: Indicates how the output will be organized. Possible values include:
+     "SummaryOnly", "AppendRow".
+    :type output_action: str or ~azure_machine_learning_workspaces.models.BatchOutputAction
+    """
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'append_row_file_name': {'key': 'appendRowFileName', 'type': 'str'},
+        'output_action': {'key': 'outputAction', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        append_row_file_name: Optional[str] = None,
+        output_action: Optional[Union[str, "BatchOutputAction"]] = None,
+        **kwargs
+    ):
+        super(BatchOutputConfiguration, self).__init__(**kwargs)
+        self.append_row_file_name = append_row_file_name
+        self.output_action = output_action
+
+
+class BatchRetrySettings(msrest.serialization.Model):
+    """Retry settings for a batch inference operation.
+
+    :param max_retries: Maximum retry count for a mini-batch.
+    :type max_retries: int
+    :param timeout: Invocation timeout for a mini-batch, in ISO 8601 format.
+    :type timeout: ~datetime.timedelta
+    """
+
+    # 'duration' serializes the timedelta as an ISO 8601 duration string on the wire.
+    _attribute_map = {
+        'max_retries': {'key': 'maxRetries', 'type': 'int'},
+        'timeout': {'key': 'timeout', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        max_retries: Optional[int] = None,
+        timeout: Optional[datetime.timedelta] = None,
+        **kwargs
+    ):
+        super(BatchRetrySettings, self).__init__(**kwargs)
+        self.max_retries = max_retries
+        self.timeout = timeout
+
+
+class CertificateDatastoreCredentials(DatastoreCredentials):
+    """Certificate datastore credentials configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param credentials_type: Required. Credential type used to authenticate with storage. Constant
+     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+     "ServicePrincipal", "SqlAdmin".
+    :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+    :param authority_url: Authority URL used for authentication.
+    :type authority_url: str
+    :param client_id: Required. Service principal client ID.
+    :type client_id: str
+    :param resource_uri: Resource the service principal has access to.
+    :type resource_uri: str
+    :param secrets: Service principal secrets.
+    :type secrets: ~azure_machine_learning_workspaces.models.CertificateDatastoreSecrets
+    :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+    :type tenant_id: str
+    :param thumbprint: Required. Thumbprint of the certificate used for authentication.
+    :type thumbprint: str
+    """
+
+    # NOTE(review): the thumbprint pattern only requires one [a-zA-Z0-9_] character
+    # somewhere in the value (looser than a full-thumbprint check); kept as generated.
+    _validation = {
+        'credentials_type': {'required': True},
+        'client_id': {'required': True},
+        'tenant_id': {'required': True},
+        'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+        'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+        'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        client_id: str,
+        tenant_id: str,
+        thumbprint: str,
+        authority_url: Optional[str] = None,
+        resource_uri: Optional[str] = None,
+        secrets: Optional["CertificateDatastoreSecrets"] = None,
+        **kwargs
+    ):
+        super(CertificateDatastoreCredentials, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this subtype, never caller-supplied.
+        self.credentials_type = 'Certificate'  # type: str
+        self.authority_url = authority_url
+        self.client_id = client_id
+        self.resource_uri = resource_uri
+        self.secrets = secrets
+        self.tenant_id = tenant_id
+        self.thumbprint = thumbprint
+
+
+class CertificateDatastoreSecrets(DatastoreSecrets):
+    """Datastore certificate secrets.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param secrets_type: Required. Credential type used to authenticate with storage. Constant
+     filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+     "ServicePrincipal", "SqlAdmin".
+    :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+    :param certificate: Service principal certificate.
+    :type certificate: str
+    """
+
+    _validation = {
+        'secrets_type': {'required': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'secrets_type': {'key': 'secretsType', 'type': 'str'},
+        'certificate': {'key': 'certificate', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        certificate: Optional[str] = None,
+        **kwargs
+    ):
+        super(CertificateDatastoreSecrets, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this subtype, never caller-supplied.
+        self.secrets_type = 'Certificate'  # type: str
+        self.certificate = certificate
+
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+    """AmlCompute update parameters.
+
+    :param scale_settings: Desired scale settings for the amlCompute.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+    """
+
+    # Dotted key: msrest flattens/unflattens the value under the ARM 'properties' envelope.
+    _attribute_map = {
+        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+    }
+
+    def __init__(
+        self,
+        *,
+        scale_settings: Optional["ScaleSettings"] = None,
+        **kwargs
+    ):
+        super(ClusterUpdateParameters, self).__init__(**kwargs)
+        self.scale_settings = scale_settings
+
+
+class ExportSummary(msrest.serialization.Model):
+    """Polymorphic base for labeling-job export summaries.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar end_time_utc: The time when the export was completed.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar exported_row_count: The total number of labeled datapoints exported.
+    :vartype exported_row_count: long
+    :param format: Required. The format of exported labels, also as the discriminator. Constant
+     filled by server. Possible values include: "Dataset", "Coco", "CSV".
+    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+    :vartype labeling_job_id: str
+    :ivar start_time_utc: The time when the export was requested.
+    :vartype start_time_utc: ~datetime.datetime
+    """
+
+    # All fields except the 'format' discriminator are server-populated.
+    _validation = {
+        'end_time_utc': {'readonly': True},
+        'exported_row_count': {'readonly': True},
+        'format': {'required': True},
+        'labeling_job_id': {'readonly': True},
+        'start_time_utc': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+        'format': {'key': 'format', 'type': 'str'},
+        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+    }
+
+    # Dispatch table: the wire value of 'format' selects which subclass to deserialize.
+    _subtype_map = {
+        'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ExportSummary, self).__init__(**kwargs)
+        self.end_time_utc = None
+        self.exported_row_count = None
+        self.format = None  # type: Optional[str]  # set to a constant by each subclass
+        self.labeling_job_id = None
+        self.start_time_utc = None
+
+
+class CocoExportSummary(ExportSummary):
+    """Export summary for labels exported in COCO format.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar end_time_utc: The time when the export was completed.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar exported_row_count: The total number of labeled datapoints exported.
+    :vartype exported_row_count: long
+    :param format: Required. The format of exported labels, also as the discriminator. Constant
+     filled by server. Possible values include: "Dataset", "Coco", "CSV".
+    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
+    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
+    :vartype labeling_job_id: str
+    :ivar start_time_utc: The time when the export was requested.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar container_name: The container name to which the labels will be exported.
+    :vartype container_name: str
+    :ivar snapshot_path: The output path where the labels will be exported.
+    :vartype snapshot_path: str
+    """
+
+    # Everything except the 'format' discriminator is server-populated.
+    _validation = {
+        'end_time_utc': {'readonly': True},
+        'exported_row_count': {'readonly': True},
+        'format': {'required': True},
+        'labeling_job_id': {'readonly': True},
+        'start_time_utc': {'readonly': True},
+        'container_name': {'readonly': True},
+        'snapshot_path': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+        'format': {'key': 'format', 'type': 'str'},
+        'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'container_name': {'key': 'containerName', 'type': 'str'},
+        'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CocoExportSummary, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this subtype.
+        self.format = 'Coco'  # type: str
+        self.container_name = None
+        self.snapshot_path = None
+
+
+class CodeConfiguration(msrest.serialization.Model):
+    """Configuration for a scoring code asset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param code_id: ARM resource ID of the code asset.
+    :type code_id: str
+    :param scoring_script: Required. The script to execute on startup. eg. "score.py".
+    :type scoring_script: str
+    """
+
+    # NOTE(review): the pattern only requires one [a-zA-Z0-9_] character somewhere in
+    # the value, not a full filename match; kept as generated.
+    _validation = {
+        'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'code_id': {'key': 'codeId', 'type': 'str'},
+        'scoring_script': {'key': 'scoringScript', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        scoring_script: str,
+        code_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(CodeConfiguration, self).__init__(**kwargs)
+        self.code_id = code_id
+        self.scoring_script = scoring_script
+
+
+class CodeContainer(msrest.serialization.Model):
+    """Container for code asset versions.
+
+    :param description: The asset description text.
+    :type description: str
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(CodeContainer, self).__init__(**kwargs)
+        self.description = description
+        self.properties = properties
+        self.tags = tags
+
+
+class CodeContainerResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.CodeContainer
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'properties' (the CodeContainer body) is the only client-required field.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'CodeContainer'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: "CodeContainer",
+        **kwargs
+    ):
+        super(CodeContainerResource, self).__init__(**kwargs)
+        self.properties = properties
+        self.system_data = None  # read-only, filled in on deserialization
+
+
+class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of CodeContainer entities.
+
+    :param next_link: The link to the next page of CodeContainer objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type CodeContainer.
+    :type value: list[~azure_machine_learning_workspaces.models.CodeContainerResource]
+    """
+
+    # Standard ARM paging envelope: one page of results plus a continuation link.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[CodeContainerResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        next_link: Optional[str] = None,
+        value: Optional[List["CodeContainerResource"]] = None,
+        **kwargs
+    ):
+        super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = next_link
+        self.value = value
+
+
+class CodeVersion(msrest.serialization.Model):
+    """Code asset version details.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param datastore_id: ARM resource ID of the datastore where the asset is located.
+    :type datastore_id: str
+    :param description: The asset description text.
+    :type description: str
+    :param is_anonymous: If the name and version are system generated (anonymous registration).
+    :type is_anonymous: bool
+    :param path: Required. The path of the file/directory in the datastore.
+    :type path: str
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    # NOTE(review): the path pattern only requires one [a-zA-Z0-9_] character somewhere
+    # in the value; kept as generated.
+    _validation = {
+        'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
+        'path': {'key': 'path', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        path: str,
+        datastore_id: Optional[str] = None,
+        description: Optional[str] = None,
+        is_anonymous: Optional[bool] = None,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(CodeVersion, self).__init__(**kwargs)
+        self.datastore_id = datastore_id
+        self.description = description
+        self.is_anonymous = is_anonymous
+        self.path = path
+        self.properties = properties
+        self.tags = tags
+
+
+class CodeVersionResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.CodeVersion
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'properties' (the CodeVersion body) is the only client-required field.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Python attribute -> REST wire key/type mapping used by msrest (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'CodeVersion'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: "CodeVersion",
+        **kwargs
+    ):
+        super(CodeVersionResource, self).__init__(**kwargs)
+        self.properties = properties
+        self.system_data = None  # read-only, filled in on deserialization
+
+
+class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of CodeVersion entities.
+
+    :param next_link: The link to the next page of CodeVersion objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type CodeVersion.
+    :type value: list[~azure_machine_learning_workspaces.models.CodeVersionResource]
+    """
+
+    # Standard ARM paging envelope: one page of results plus a continuation link.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[CodeVersionResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        next_link: Optional[str] = None,
+        value: Optional[List["CodeVersionResource"]] = None,
+        **kwargs
+    ):
+        super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = next_link
+        self.value = value
+
+
+class JobBase(msrest.serialization.Model):
+    """Base definition for a job.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: CommandJob, SweepJob.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The asset description text.
+    :type description: str
+    :ivar interaction_endpoints: List of JobEndpoints.
+     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+    :vartype interaction_endpoints: dict[str,
+     ~azure_machine_learning_workspaces.models.JobEndpoint]
+    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
+     "Succeeded", "Failed", "Canceled", "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    # Fields marked readonly are populated by the service and are never
+    # serialized on requests; job_type is the required polymorphic discriminator.
+    _validation = {
+        'interaction_endpoints': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Python attribute -> JSON wire name / msrest type.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    # Discriminator map: msrest reads the 'jobType' wire value and deserializes
+    # the payload into the matching concrete subclass.
+    _subtype_map = {
+        'job_type': {'Command': 'CommandJob', 'Sweep': 'SweepJob'}
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(JobBase, self).__init__(**kwargs)
+        self.description = description
+        self.interaction_endpoints = None  # read-only, set by the service
+        # Concrete subclasses overwrite this with their discriminator value.
+        self.job_type = None  # type: Optional[str]
+        self.properties = properties
+        self.provisioning_state = None  # read-only, set by the service
+        self.tags = tags
+
+
+class CommandJob(JobBase):
+    """Command job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param description: The asset description text.
+    :type description: str
+    :ivar interaction_endpoints: List of JobEndpoints.
+     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+    :vartype interaction_endpoints: dict[str,
+     ~azure_machine_learning_workspaces.models.JobEndpoint]
+    :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+     values include: "Command", "Sweep", "Labeling".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
+     "Succeeded", "Failed", "Canceled", "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param code_id: ARM resource ID of the code asset.
+    :type code_id: str
+    :param command: Required. The command to execute on startup of the job. eg. "python train.py".
+    :type command: str
+    :param compute: Required. Compute binding for the job.
+    :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+    :param distribution: Distribution configuration of the job. If set, this should be one of Mpi,
+     Tensorflow, PyTorch, or null.
+    :type distribution: ~azure_machine_learning_workspaces.models.DistributionConfiguration
+    :param environment_id: The ARM resource ID of the Environment specification for the job.
+    :type environment_id: str
+    :param environment_variables: Environment variables included in the job.
+    :type environment_variables: dict[str, str]
+    :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+     placed in the "Default" experiment.
+    :type experiment_name: str
+    :param identity: Identity configuration. If set, this should be one of AmlToken,
+     ManagedIdentity, or null.
+     Defaults to AmlToken if null.
+    :type identity: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+    :param input_data_bindings: Mapping of input data bindings used in the job.
+    :type input_data_bindings: dict[str,
+     ~azure_machine_learning_workspaces.models.InputDataBinding]
+    :ivar output: Location of the job output logs and artifacts.
+    :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+    :param output_data_bindings: Mapping of output data bindings used in the job.
+    :type output_data_bindings: dict[str,
+     ~azure_machine_learning_workspaces.models.OutputDataBinding]
+    :ivar parameters: Input parameters.
+    :vartype parameters: dict[str, object]
+    :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+     Private preview feature and only available to users on the allow list.
+    :type priority: int
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :param timeout: The max run duration in ISO 8601 format, after which the job will be cancelled.
+     Only supports duration with precision as low as Seconds.
+    :type timeout: ~datetime.timedelta
+    """
+
+    # Server-populated fields are readonly and never serialized on requests;
+    # 'command' must be non-empty and satisfy the service-side pattern.
+    _validation = {
+        'interaction_endpoints': {'readonly': True},
+        'job_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+        'compute': {'required': True},
+        'output': {'readonly': True},
+        'parameters': {'readonly': True},
+        'status': {'readonly': True},
+    }
+
+    # Python attribute -> JSON wire name / msrest type.
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'code_id': {'key': 'codeId', 'type': 'str'},
+        'command': {'key': 'command', 'type': 'str'},
+        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
+        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
+        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
+        'output': {'key': 'output', 'type': 'JobOutput'},
+        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'priority': {'key': 'priority', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'timeout': {'key': 'timeout', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        command: str,
+        compute: "ComputeConfiguration",
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        code_id: Optional[str] = None,
+        distribution: Optional["DistributionConfiguration"] = None,
+        environment_id: Optional[str] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        experiment_name: Optional[str] = None,
+        identity: Optional["IdentityConfiguration"] = None,
+        input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
+        output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
+        priority: Optional[int] = None,
+        timeout: Optional[datetime.timedelta] = None,
+        **kwargs
+    ):
+        super(CommandJob, self).__init__(description=description, properties=properties, tags=tags, **kwargs)
+        # Fix the polymorphic discriminator for this subclass.
+        self.job_type = 'Command'  # type: str
+        self.code_id = code_id
+        self.command = command
+        self.compute = compute
+        self.distribution = distribution
+        self.environment_id = environment_id
+        self.environment_variables = environment_variables
+        self.experiment_name = experiment_name
+        self.identity = identity
+        self.input_data_bindings = input_data_bindings
+        self.output = None  # read-only, set by the service
+        self.output_data_bindings = output_data_bindings
+        self.parameters = None  # read-only, set by the service
+        self.priority = priority
+        self.status = None  # read-only, set by the service
+        self.timeout = timeout
+
+
+class Components1D3SwueSchemasComputeresourceAllof1(msrest.serialization.Model):
+ """Components1D3SwueSchemasComputeresourceAllof1.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(Components1D3SwueSchemasComputeresourceAllof1, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class ComputeConfiguration(msrest.serialization.Model):
+ """Configuration for compute binding.
+
+ :param instance_count: Number of instances or nodes.
+ :type instance_count: int
+ :param instance_type: SKU type to run on.
+ :type instance_type: str
+ :param is_local: Set to true for jobs running on local compute.
+ :type is_local: bool
+ :param location: Location for virtual cluster run.
+ :type location: str
+ :param properties: Additional properties.
+ :type properties: dict[str, str]
+ :param target: ARM resource ID of the compute resource.
+ :type target: str
+ """
+
+ _attribute_map = {
+ 'instance_count': {'key': 'instanceCount', 'type': 'int'},
+ 'instance_type': {'key': 'instanceType', 'type': 'str'},
+ 'is_local': {'key': 'isLocal', 'type': 'bool'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'target': {'key': 'target', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ instance_count: Optional[int] = None,
+ instance_type: Optional[str] = None,
+ is_local: Optional[bool] = None,
+ location: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ target: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeConfiguration, self).__init__(**kwargs)
+ self.instance_count = instance_count
+ self.instance_type = instance_type
+ self.is_local = is_local
+ self.location = location
+ self.properties = properties
+ self.target = target
+
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["ComputeInstanceProperties"] = None,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = properties
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application' endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ operation_name: Optional[Union[str, "OperationName"]] = None,
+ operation_time: Optional[datetime.datetime] = None,
+ operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = operation_name
+ self.operation_time = operation_time
+ self.operation_status = operation_status
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ :param schedules: The list of schedules to be applied on the compute instance.
+ :type schedules: ~azure_machine_learning_workspaces.models.ComputeSchedules
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vm_size: Optional[str] = None,
+ subnet: Optional["ResourceId"] = None,
+ application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+ compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal",
+ personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None,
+ setup_scripts: Optional["SetupScripts"] = None,
+ schedules: Optional["ComputeSchedules"] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.setup_scripts = setup_scripts
+ self.last_operation = None
+ self.schedules = schedules
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled",
+ admin_public_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = ssh_public_access
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = admin_public_key
+
+
+class ComputeResource(Resource, Components1D3SwueSchemasComputeresourceAllof1):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["Compute"] = None,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(properties=properties, **kwargs)
+ self.properties = properties
+ self.identity = identity
+ self.location = location
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+ self.id = None
+ self.name = None
+ self.type = None
+ self.identity = identity
+ self.location = location
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+
+
+class ComputeSchedules(msrest.serialization.Model):
+ """The list of schedules to be applied on the computes.
+
+ :param compute_start_stop: The list of compute start stop schedules to be applied.
+ :type compute_start_stop:
+ list[~azure_machine_learning_workspaces.models.ComputeStartStopSchedule]
+ """
+
+ _attribute_map = {
+ 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_start_stop: Optional[List["ComputeStartStopSchedule"]] = None,
+ **kwargs
+ ):
+ super(ComputeSchedules, self).__init__(**kwargs)
+ self.compute_start_stop = compute_start_stop
+
+
+class ComputeStartStopSchedule(msrest.serialization.Model):
+ """Compute start stop schedule properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Schedule id.
+ :vartype id: str
+ :ivar provisioning_status: The current deployment state of schedule. Possible values include:
+ "Completed", "Provisioning", "Failed".
+ :vartype provisioning_status: str or
+ ~azure_machine_learning_workspaces.models.ProvisioningStatus
+ :param status: The schedule status. Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.ScheduleStatus
+ :param trigger_type: The schedule trigger type. Possible values include: "Recurrence", "Cron".
+ :type trigger_type: str or ~azure_machine_learning_workspaces.models.TriggerType
+ :param action: The compute power action. Possible values include: "Start", "Stop".
+ :type action: str or ~azure_machine_learning_workspaces.models.ComputePowerAction
+ :param recurrence: The workflow trigger recurrence for ComputeStartStop schedule type.
+ :type recurrence: ~azure_machine_learning_workspaces.models.Recurrence
+ :param cron: The workflow trigger cron for ComputeStartStop schedule type.
+ :type cron: ~azure_machine_learning_workspaces.models.Cron
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'provisioning_status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'trigger_type': {'key': 'triggerType', 'type': 'str'},
+ 'action': {'key': 'action', 'type': 'str'},
+ 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
+ 'cron': {'key': 'cron', 'type': 'Cron'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "ScheduleStatus"]] = None,
+ trigger_type: Optional[Union[str, "TriggerType"]] = None,
+ action: Optional[Union[str, "ComputePowerAction"]] = None,
+ recurrence: Optional["Recurrence"] = None,
+ cron: Optional["Cron"] = None,
+ **kwargs
+ ):
+ super(ComputeStartStopSchedule, self).__init__(**kwargs)
+ self.id = None
+ self.provisioning_status = None
+ self.status = status
+ self.trigger_type = trigger_type
+ self.action = action
+ self.recurrence = recurrence
+ self.cron = cron
+
+
class ContainerResourceRequirements(msrest.serialization.Model):
    """The resource requirements for the container (cpu and memory).

    Minimum values are requests; ``*_limit`` values are hard caps. See
    https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.

    :param cpu: The minimum amount of CPU cores to be used by the container.
    :type cpu: float
    :param cpu_limit: The maximum amount of CPU cores allowed to be used by the container.
    :type cpu_limit: float
    :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container.
    :type memory_in_gb: float
    :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
     container.
    :type memory_in_gb_limit: float
    :param gpu: The number of GPU cores in the container.
    :type gpu: int
    :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
    :type fpga: int
    """

    _attribute_map = {
        "cpu": {"key": "cpu", "type": "float"},
        "cpu_limit": {"key": "cpuLimit", "type": "float"},
        "memory_in_gb": {"key": "memoryInGB", "type": "float"},
        "memory_in_gb_limit": {"key": "memoryInGBLimit", "type": "float"},
        "gpu": {"key": "gpu", "type": "int"},
        "fpga": {"key": "fpga", "type": "int"},
    }

    def __init__(self, *, cpu: Optional[float] = None, cpu_limit: Optional[float] = None,
                 memory_in_gb: Optional[float] = None, memory_in_gb_limit: Optional[float] = None,
                 gpu: Optional[int] = None, fpga: Optional[int] = None, **kwargs):
        super(ContainerResourceRequirements, self).__init__(**kwargs)
        self.cpu = cpu
        self.cpu_limit = cpu_limit
        self.memory_in_gb = memory_in_gb
        self.memory_in_gb_limit = memory_in_gb_limit
        self.gpu = gpu
        self.fpga = fpga
+
+
class CosmosDbSettings(msrest.serialization.Model):
    """Settings for the Cosmos DB database backing the workspace.

    :param collections_throughput: The throughput of the collections in cosmosdb database.
    :type collections_throughput: int
    """

    _attribute_map = {
        "collections_throughput": {"key": "collectionsThroughput", "type": "int"},
    }

    def __init__(self, *, collections_throughput: Optional[int] = None, **kwargs):
        super(CosmosDbSettings, self).__init__(**kwargs)
        self.collections_throughput = collections_throughput
+
+
class Cron(msrest.serialization.Model):
    """The workflow trigger cron for ComputeStartStop schedule type.

    :param start_time: The start time.
    :type start_time: str
    :param time_zone: The time zone.
    :type time_zone: str
    :param expression: The cron expression.
    :type expression: str
    """

    _attribute_map = {
        "start_time": {"key": "startTime", "type": "str"},
        "time_zone": {"key": "timeZone", "type": "str"},
        "expression": {"key": "expression", "type": "str"},
    }

    def __init__(self, *, start_time: Optional[str] = None, time_zone: Optional[str] = None,
                 expression: Optional[str] = None, **kwargs):
        super(Cron, self).__init__(**kwargs)
        self.start_time = start_time
        self.time_zone = time_zone
        self.expression = expression
+
+
class CsvExportSummary(ExportSummary):
    """Summary of a labeling-job export in CSV format.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar container_name: The container name to which the labels will be exported.
    :vartype container_name: str
    :ivar snapshot_path: The output path where the labels will be exported.
    :vartype snapshot_path: str
    """

    _validation = {
        "end_time_utc": {"readonly": True},
        "exported_row_count": {"readonly": True},
        "format": {"required": True},
        "labeling_job_id": {"readonly": True},
        "start_time_utc": {"readonly": True},
        "container_name": {"readonly": True},
        "snapshot_path": {"readonly": True},
    }

    _attribute_map = {
        "end_time_utc": {"key": "endTimeUtc", "type": "iso-8601"},
        "exported_row_count": {"key": "exportedRowCount", "type": "long"},
        "format": {"key": "format", "type": "str"},
        "labeling_job_id": {"key": "labelingJobId", "type": "str"},
        "start_time_utc": {"key": "startTimeUtc", "type": "iso-8601"},
        "container_name": {"key": "containerName", "type": "str"},
        "snapshot_path": {"key": "snapshotPath", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(CsvExportSummary, self).__init__(**kwargs)
        # Discriminator value selecting this polymorphic subtype.
        self.format = 'CSV'  # type: str
        # Read-only fields populated by the service.
        self.container_name = None
        self.snapshot_path = None
+
+
class Databricks(Compute):
    """A Databricks compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        disable_local_auth: Optional[bool] = None,
        properties: Optional["DatabricksProperties"] = None,
        **kwargs
    ):
        super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
        # Discriminator value selecting this polymorphic compute subtype.
        self.compute_type = 'Databricks'  # type: str
        self.properties = properties
+
+
class DatabricksComputeSecrets(ComputeSecrets):
    """Secrets related to a Machine Learning compute based on Databricks.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param databricks_access_token: access token for databricks account.
    :type databricks_access_token: str
    """

    _validation = {
        "compute_type": {"required": True},
    }

    _attribute_map = {
        "compute_type": {"key": "computeType", "type": "str"},
        "databricks_access_token": {"key": "databricksAccessToken", "type": "str"},
    }

    def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs):
        super(DatabricksComputeSecrets, self).__init__(**kwargs)
        # Discriminator value selecting this polymorphic secrets subtype.
        self.compute_type = 'Databricks'  # type: str
        self.databricks_access_token = databricks_access_token
+
+
class DatabricksProperties(msrest.serialization.Model):
    """Properties of an attached Databricks workspace.

    :param databricks_access_token: Databricks access token.
    :type databricks_access_token: str
    :param workspace_url: Workspace Url.
    :type workspace_url: str
    """

    _attribute_map = {
        "databricks_access_token": {"key": "databricksAccessToken", "type": "str"},
        "workspace_url": {"key": "workspaceUrl", "type": "str"},
    }

    def __init__(self, *, databricks_access_token: Optional[str] = None,
                 workspace_url: Optional[str] = None, **kwargs):
        super(DatabricksProperties, self).__init__(**kwargs)
        self.databricks_access_token = databricks_access_token
        self.workspace_url = workspace_url
+
+
class DataContainer(msrest.serialization.Model):
    """Container for data asset versions.

    :param description: The asset description text.
    :type description: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{str}"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, description: Optional[str] = None,
                 properties: Optional[Dict[str, str]] = None,
                 tags: Optional[Dict[str, str]] = None, **kwargs):
        super(DataContainer, self).__init__(**kwargs)
        self.description = description
        self.properties = properties
        self.tags = tags
+
+
class DataContainerResource(Resource):
    """Azure Resource Manager resource envelope for a DataContainer.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.DataContainer
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"required": True},
        "system_data": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "DataContainer"},
        "system_data": {"key": "systemData", "type": "SystemData"},
    }

    def __init__(self, *, properties: "DataContainer", **kwargs):
        super(DataContainerResource, self).__init__(**kwargs)
        self.properties = properties
        # Read-only; populated by the service.
        self.system_data = None
+
+
class DataContainerResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of DataContainer entities.

    :param next_link: The link to the next page of DataContainer objects. If null, there are no
     additional pages.
    :type next_link: str
    :param value: An array of objects of type DataContainer.
    :type value: list[~azure_machine_learning_workspaces.models.DataContainerResource]
    """

    _attribute_map = {
        "next_link": {"key": "nextLink", "type": "str"},
        "value": {"key": "value", "type": "[DataContainerResource]"},
    }

    def __init__(self, *, next_link: Optional[str] = None,
                 value: Optional[List["DataContainerResource"]] = None, **kwargs):
        super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class DataFactory(Compute):
    """A DataFactory compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    """

    _validation = {
        "compute_type": {"required": True},
        "provisioning_state": {"readonly": True},
        "created_on": {"readonly": True},
        "modified_on": {"readonly": True},
        "provisioning_errors": {"readonly": True},
        "is_attached_compute": {"readonly": True},
    }

    _attribute_map = {
        "compute_type": {"key": "computeType", "type": "str"},
        "compute_location": {"key": "computeLocation", "type": "str"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "created_on": {"key": "createdOn", "type": "iso-8601"},
        "modified_on": {"key": "modifiedOn", "type": "iso-8601"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"},
        "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"},
        "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"},
    }

    def __init__(self, *, compute_location: Optional[str] = None, description: Optional[str] = None,
                 resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None,
                 **kwargs):
        super(DataFactory, self).__init__(compute_location=compute_location,
                                          description=description,
                                          resource_id=resource_id,
                                          disable_local_auth=disable_local_auth,
                                          **kwargs)
        # Discriminator value selecting this polymorphic compute subtype.
        self.compute_type = 'DataFactory'  # type: str
+
+
class DataLakeAnalytics(Compute):
    """A DataLakeAnalytics compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics", "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
     MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
    """

    _validation = {
        "compute_type": {"required": True},
        "provisioning_state": {"readonly": True},
        "created_on": {"readonly": True},
        "modified_on": {"readonly": True},
        "provisioning_errors": {"readonly": True},
        "is_attached_compute": {"readonly": True},
    }

    _attribute_map = {
        "compute_type": {"key": "computeType", "type": "str"},
        "compute_location": {"key": "computeLocation", "type": "str"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "created_on": {"key": "createdOn", "type": "iso-8601"},
        "modified_on": {"key": "modifiedOn", "type": "iso-8601"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"},
        "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"},
        "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"},
        "properties": {"key": "properties", "type": "DataLakeAnalyticsProperties"},
    }

    def __init__(self, *, compute_location: Optional[str] = None, description: Optional[str] = None,
                 resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None,
                 properties: Optional["DataLakeAnalyticsProperties"] = None, **kwargs):
        super(DataLakeAnalytics, self).__init__(compute_location=compute_location,
                                                description=description,
                                                resource_id=resource_id,
                                                disable_local_auth=disable_local_auth,
                                                **kwargs)
        # Discriminator value selecting this polymorphic compute subtype.
        self.compute_type = 'DataLakeAnalytics'  # type: str
        self.properties = properties
+
+
class DataLakeAnalyticsProperties(msrest.serialization.Model):
    """Properties of a Data Lake Analytics compute.

    :param data_lake_store_account_name: DataLake Store Account Name.
    :type data_lake_store_account_name: str
    """

    _attribute_map = {
        "data_lake_store_account_name": {"key": "dataLakeStoreAccountName", "type": "str"},
    }

    def __init__(self, *, data_lake_store_account_name: Optional[str] = None, **kwargs):
        super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
        self.data_lake_store_account_name = data_lake_store_account_name
+
+
class DataPathAssetReference(AssetReferenceBase):
    """Reference to an asset via its path in a datastore.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
     server. Possible values include: "Id", "DataPath", "OutputPath".
    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
    :param datastore_id: ARM resource ID of the datastore where the asset is located.
    :type datastore_id: str
    :param path: The path of the file/directory in the datastore.
    :type path: str
    """

    _validation = {
        "reference_type": {"required": True},
    }

    _attribute_map = {
        "reference_type": {"key": "referenceType", "type": "str"},
        "datastore_id": {"key": "datastoreId", "type": "str"},
        "path": {"key": "path", "type": "str"},
    }

    def __init__(self, *, datastore_id: Optional[str] = None, path: Optional[str] = None, **kwargs):
        super(DataPathAssetReference, self).__init__(**kwargs)
        # Discriminator value selecting this polymorphic reference subtype.
        self.reference_type = 'DataPath'  # type: str
        self.datastore_id = datastore_id
        self.path = path
+
+
class DatasetExportSummary(ExportSummary):
    """Summary of a labeling-job export to a labeled dataset asset.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar end_time_utc: The time when the export was completed.
    :vartype end_time_utc: ~datetime.datetime
    :ivar exported_row_count: The total number of labeled datapoints exported.
    :vartype exported_row_count: long
    :param format: Required. The format of exported labels, also as the discriminator.Constant
     filled by server. Possible values include: "Dataset", "Coco", "CSV".
    :type format: str or ~azure_machine_learning_workspaces.models.ExportFormatType
    :ivar labeling_job_id: Name and identifier of the job containing exported labels.
    :vartype labeling_job_id: str
    :ivar start_time_utc: The time when the export was requested.
    :vartype start_time_utc: ~datetime.datetime
    :ivar labeled_asset_name: The unique name of the labeled data asset.
    :vartype labeled_asset_name: str
    """

    _validation = {
        "end_time_utc": {"readonly": True},
        "exported_row_count": {"readonly": True},
        "format": {"required": True},
        "labeling_job_id": {"readonly": True},
        "start_time_utc": {"readonly": True},
        "labeled_asset_name": {"readonly": True},
    }

    _attribute_map = {
        "end_time_utc": {"key": "endTimeUtc", "type": "iso-8601"},
        "exported_row_count": {"key": "exportedRowCount", "type": "long"},
        "format": {"key": "format", "type": "str"},
        "labeling_job_id": {"key": "labelingJobId", "type": "str"},
        "start_time_utc": {"key": "startTimeUtc", "type": "iso-8601"},
        "labeled_asset_name": {"key": "labeledAssetName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(DatasetExportSummary, self).__init__(**kwargs)
        # Discriminator value selecting this polymorphic subtype.
        self.format = 'Dataset'  # type: str
        # Read-only; populated by the service.
        self.labeled_asset_name = None
+
+
class DatastoreProperties(msrest.serialization.Model):
    """Datastore definition.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param contents: Required. Reference to the datastore storage contents.
    :type contents: ~azure_machine_learning_workspaces.models.DatastoreContents
    :param description: The asset description text.
    :type description: str
    :ivar has_been_validated: Whether the service has validated access to the datastore with the
     provided credentials.
    :vartype has_been_validated: bool
    :param is_default: Whether this datastore is the default for the workspace.
    :type is_default: bool
    :param linked_info: Information about the datastore origin, if linked.
    :type linked_info: ~azure_machine_learning_workspaces.models.LinkedInfo
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _validation = {
        "contents": {"required": True},
        "has_been_validated": {"readonly": True},
    }

    _attribute_map = {
        "contents": {"key": "contents", "type": "DatastoreContents"},
        "description": {"key": "description", "type": "str"},
        "has_been_validated": {"key": "hasBeenValidated", "type": "bool"},
        "is_default": {"key": "isDefault", "type": "bool"},
        "linked_info": {"key": "linkedInfo", "type": "LinkedInfo"},
        "properties": {"key": "properties", "type": "{str}"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, contents: "DatastoreContents", description: Optional[str] = None,
                 is_default: Optional[bool] = None, linked_info: Optional["LinkedInfo"] = None,
                 properties: Optional[Dict[str, str]] = None,
                 tags: Optional[Dict[str, str]] = None, **kwargs):
        super(DatastoreProperties, self).__init__(**kwargs)
        self.contents = contents
        self.description = description
        # Read-only; populated by the service after credential validation.
        self.has_been_validated = None
        self.is_default = is_default
        self.linked_info = linked_info
        self.properties = properties
        self.tags = tags
+
+
class DatastorePropertiesResource(Resource):
    """Azure Resource Manager resource envelope for DatastoreProperties.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.DatastoreProperties
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"required": True},
        "system_data": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "DatastoreProperties"},
        "system_data": {"key": "systemData", "type": "SystemData"},
    }

    def __init__(self, *, properties: "DatastoreProperties", **kwargs):
        super(DatastorePropertiesResource, self).__init__(**kwargs)
        self.properties = properties
        # Read-only; populated by the service.
        self.system_data = None
+
+
class DatastorePropertiesResourceArmPaginatedResult(msrest.serialization.Model):
    """A paginated list of DatastoreProperties entities.

    :param next_link: The link to the next page of DatastoreProperties objects. If null, there are
     no additional pages.
    :type next_link: str
    :param value: An array of objects of type DatastoreProperties.
    :type value: list[~azure_machine_learning_workspaces.models.DatastorePropertiesResource]
    """

    _attribute_map = {
        "next_link": {"key": "nextLink", "type": "str"},
        "value": {"key": "value", "type": "[DatastorePropertiesResource]"},
    }

    def __init__(self, *, next_link: Optional[str] = None,
                 value: Optional[List["DatastorePropertiesResource"]] = None, **kwargs):
        super(DatastorePropertiesResourceArmPaginatedResult, self).__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class DataVersion(msrest.serialization.Model):
    """Data asset version details.

    All required parameters must be populated in order to send to Azure.

    :param dataset_type: The Format of dataset. Possible values include: "Simple", "Dataflow".
    :type dataset_type: str or ~azure_machine_learning_workspaces.models.DatasetType
    :param datastore_id: ARM resource ID of the datastore where the asset is located.
    :type datastore_id: str
    :param description: The asset description text.
    :type description: str
    :param is_anonymous: Whether the name and version were system generated (anonymous
     registration).
    :type is_anonymous: bool
    :param path: Required. The path of the file/directory in the datastore.
    :type path: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _validation = {
        "path": {"required": True, "pattern": r"[a-zA-Z0-9_]"},
    }

    _attribute_map = {
        "dataset_type": {"key": "datasetType", "type": "str"},
        "datastore_id": {"key": "datastoreId", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "is_anonymous": {"key": "isAnonymous", "type": "bool"},
        "path": {"key": "path", "type": "str"},
        "properties": {"key": "properties", "type": "{str}"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, path: str, dataset_type: Optional[Union[str, "DatasetType"]] = None,
                 datastore_id: Optional[str] = None, description: Optional[str] = None,
                 is_anonymous: Optional[bool] = None, properties: Optional[Dict[str, str]] = None,
                 tags: Optional[Dict[str, str]] = None, **kwargs):
        super(DataVersion, self).__init__(**kwargs)
        self.dataset_type = dataset_type
        self.datastore_id = datastore_id
        self.description = description
        self.is_anonymous = is_anonymous
        self.path = path
        self.properties = properties
        self.tags = tags
+
+
class DataVersionResource(Resource):
    """Azure Resource Manager resource envelope for a DataVersion.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.DataVersion
    :ivar system_data: System data associated with resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"required": True},
        "system_data": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "DataVersion"},
        "system_data": {"key": "systemData", "type": "SystemData"},
    }

    def __init__(self, *, properties: "DataVersion", **kwargs):
        super(DataVersionResource, self).__init__(**kwargs)
        self.properties = properties
        # Read-only; populated by the service.
        self.system_data = None
+
+
class DataVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of DataVersion entities.

    :param next_link: Link to the next page of DataVersion objects; None when there
     are no further pages.
    :type next_link: str
    :param value: The DataVersion objects contained in this page.
    :type value: list[~azure_machine_learning_workspaces.models.DataVersionResource]
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[DataVersionResource]'},
    }

    def __init__(
        self,
        *,
        next_link: Optional[str] = None,
        value: Optional[List["DataVersionResource"]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class DeploymentLogs(msrest.serialization.Model):
    """Logs retrieved for an online deployment.

    :param content: The retrieved online deployment logs.
    :type content: str
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'content': {'key': 'content', 'type': 'str'},
    }

    def __init__(self, *, content: Optional[str] = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.content = content
+
+
class DeploymentLogsRequest(msrest.serialization.Model):
    """Request payload for fetching online deployment logs.

    :param container_type: Which container to retrieve logs from. Possible values
     include: "StorageInitializer", "InferenceServer".
    :type container_type: str or ~azure_machine_learning_workspaces.models.ContainerType
    :param tail: Maximum number of lines to tail.
    :type tail: int
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'container_type': {'key': 'containerType', 'type': 'str'},
        'tail': {'key': 'tail', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        container_type: Optional[Union[str, "ContainerType"]] = None,
        tail: Optional[int] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.container_type = container_type
        self.tail = tail
+
+
class DistributionConfiguration(msrest.serialization.Model):
    """Abstract base for job distribution configuration.

    Use one of the concrete sub-classes (Mpi, PyTorch, TensorFlow) rather than
    this class directly.

    All required parameters must be populated in order to send to Azure.

    :param distribution_type: Required. Specifies the type of distribution
     framework. Constant filled by server. Possible values include: "PyTorch",
     "TensorFlow", "Mpi".
    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
    """

    _validation = {
        'distribution_type': {'required': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
    }

    # Polymorphic dispatch: discriminator value -> concrete model class name.
    _subtype_map = {
        'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'}
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Discriminator; set by the concrete sub-classes.
        self.distribution_type = None  # type: Optional[str]
+
+
class DockerSpecification(msrest.serialization.Model):
    """Abstract base for Docker configuration settings.

    Use one of the concrete sub-classes (DockerBuild, DockerImage) rather than
    this class directly.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either
     Build or Image. Constant filled by server. Possible values include: "Build",
     "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    """

    _validation = {
        'docker_specification_type': {'required': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
    }

    # Polymorphic dispatch: discriminator value -> concrete model class name.
    _subtype_map = {
        'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'}
    }

    def __init__(self, *, platform: Optional["DockerImagePlatform"] = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Discriminator; set by the concrete sub-classes.
        self.docker_specification_type = None  # type: Optional[str]
        self.platform = platform
+
+
class DockerBuild(DockerSpecification):
    """Configuration settings for a Docker Build.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either
     Build or Image. Constant filled by server. Possible values include: "Build",
     "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    :param context: Path to a snapshot of the Docker Context. Only valid when
     Dockerfile is specified; the path is relative to the asset path, which must
     contain a single Blob URI value.
    :type context: str
    :param dockerfile: Required. Docker command line instructions to assemble an
     image.
    :type dockerfile: str
    """

    _validation = {
        'docker_specification_type': {'required': True},
        'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
        'context': {'key': 'context', 'type': 'str'},
        'dockerfile': {'key': 'dockerfile', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        dockerfile: str,
        platform: Optional["DockerImagePlatform"] = None,
        context: Optional[str] = None,
        **kwargs
    ) -> None:
        super().__init__(platform=platform, **kwargs)
        # Fixed discriminator value for this concrete type.
        self.docker_specification_type = 'Build'  # type: str
        self.context = context
        self.dockerfile = dockerfile
+
+
class DockerImage(DockerSpecification):
    """Configuration settings for a pre-built Docker Image.

    All required parameters must be populated in order to send to Azure.

    :param docker_specification_type: Required. Docker specification must be either
     Build or Image. Constant filled by server. Possible values include: "Build",
     "Image".
    :type docker_specification_type: str or
     ~azure_machine_learning_workspaces.models.DockerSpecificationType
    :param platform: The platform information of the docker image.
    :type platform: ~azure_machine_learning_workspaces.models.DockerImagePlatform
    :param docker_image_uri: Required. Image name of a custom base image.
    :type docker_image_uri: str
    """

    _validation = {
        'docker_specification_type': {'required': True},
        'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
        'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        docker_image_uri: str,
        platform: Optional["DockerImagePlatform"] = None,
        **kwargs
    ) -> None:
        super().__init__(platform=platform, **kwargs)
        # Fixed discriminator value for this concrete type.
        self.docker_specification_type = 'Image'  # type: str
        self.docker_image_uri = docker_image_uri
+
+
class DockerImagePlatform(msrest.serialization.Model):
    """Platform information of a docker image.

    :param operating_system_type: The OS type of the Environment. Possible values
     include: "Linux", "Windows".
    :type operating_system_type: str or
     ~azure_machine_learning_workspaces.models.OperatingSystemType
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        operating_system_type: Optional[Union[str, "OperatingSystemType"]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.operating_system_type = operating_system_type
+
+
class EncryptionProperty(msrest.serialization.Model):
    """Customer-managed-key encryption settings for a workspace.

    All required parameters must be populated in order to send to Azure.

    :param status: Required. Indicates whether or not the encryption is enabled for
     the workspace. Possible values include: "Enabled", "Disabled".
    :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
    :param identity: The identity used to access the key vault for encryption at
     rest.
    :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
    :param key_vault_properties: Required. Customer Key vault properties.
    :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
    """

    _validation = {
        'status': {'required': True},
        'key_vault_properties': {'required': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
        'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
    }

    def __init__(
        self,
        *,
        status: Union[str, "EncryptionStatus"],
        key_vault_properties: "KeyVaultProperties",
        identity: Optional["IdentityForCmk"] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.status = status
        self.identity = identity
        self.key_vault_properties = key_vault_properties
+
+
class EndpointAuthKeys(msrest.serialization.Model):
    """Key pair used for endpoint authentication.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        primary_key: Optional[str] = None,
        secondary_key: Optional[str] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.primary_key = primary_key
        self.secondary_key = secondary_key
+
+
class EndpointAuthToken(msrest.serialization.Model):
    """Service token used for endpoint authentication.

    :param access_token: Access token.
    :type access_token: str
    :param expiry_time_utc: Access token expiry time (UTC).
    :type expiry_time_utc: long
    :param refresh_after_time_utc: Time (UTC) after which the access token should
     be refreshed.
    :type refresh_after_time_utc: long
    :param token_type: Access token type.
    :type token_type: str
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'access_token': {'key': 'accessToken', 'type': 'str'},
        'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'},
        'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'},
        'token_type': {'key': 'tokenType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        access_token: Optional[str] = None,
        expiry_time_utc: Optional[int] = None,
        refresh_after_time_utc: Optional[int] = None,
        token_type: Optional[str] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.access_token = access_token
        self.expiry_time_utc = expiry_time_utc
        self.refresh_after_time_utc = refresh_after_time_utc
        self.token_type = token_type
+
+
class EnvironmentContainer(msrest.serialization.Model):
    """Container holding environment specification versions.

    :param description: The asset description text.
    :type description: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and
     updated.
    :type tags: dict[str, str]
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.description = description
        self.properties = properties
        self.tags = tags
+
+
class EnvironmentContainerResource(Resource):
    """Azure Resource Manager resource envelope for an EnvironmentContainer entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID of this resource, e.g.
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Resource type, e.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.EnvironmentContainer
    :ivar system_data: System data associated with the resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    # Server-populated fields are read-only; ``properties`` is the only
    # client-supplied (and required) field.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentContainer'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, *, properties: "EnvironmentContainer", **kwargs) -> None:
        super().__init__(**kwargs)
        self.properties = properties
        # Populated by the service on responses only.
        self.system_data = None
+
+
class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of EnvironmentContainer entities.

    :param next_link: Link to the next page of EnvironmentContainer objects; None
     when there are no further pages.
    :type next_link: str
    :param value: The EnvironmentContainer objects contained in this page.
    :type value: list[~azure_machine_learning_workspaces.models.EnvironmentContainerResource]
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[EnvironmentContainerResource]'},
    }

    def __init__(
        self,
        *,
        next_link: Optional[str] = None,
        value: Optional[List["EnvironmentContainerResource"]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class EnvironmentSpecificationVersion(msrest.serialization.Model):
    """Environment specification version details.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param conda_file: Standard configuration file used by Conda that lets you
     install any kind of package, including Python, R, and C/C++ packages.
    :type conda_file: str
    :param description: The asset description text.
    :type description: str
    :param docker: Configuration settings for Docker.
    :type docker: ~azure_machine_learning_workspaces.models.DockerSpecification
    :ivar environment_specification_type: Whether the environment specification is
     user managed or curated by the Azure ML service. Possible values include:
     "Curated", "UserCreated".
    :vartype environment_specification_type: str or
     ~azure_machine_learning_workspaces.models.EnvironmentSpecificationType
    :param inference_container_properties: Defines configuration specific to
     inference.
    :type inference_container_properties:
     ~azure_machine_learning_workspaces.models.InferenceContainerProperties
    :param is_anonymous: If the name version are system generated (anonymous
     registration).
    :type is_anonymous: bool
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and
     updated.
    :type tags: dict[str, str]
    """

    # The specification type is determined by the service, never by the client.
    _validation = {
        'environment_specification_type': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'conda_file': {'key': 'condaFile', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'docker': {'key': 'docker', 'type': 'DockerSpecification'},
        'environment_specification_type': {'key': 'environmentSpecificationType', 'type': 'str'},
        'inference_container_properties': {'key': 'inferenceContainerProperties', 'type': 'InferenceContainerProperties'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        conda_file: Optional[str] = None,
        description: Optional[str] = None,
        docker: Optional["DockerSpecification"] = None,
        inference_container_properties: Optional["InferenceContainerProperties"] = None,
        is_anonymous: Optional[bool] = None,
        properties: Optional[Dict[str, str]] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.conda_file = conda_file
        self.description = description
        self.docker = docker
        # Populated by the service on responses only.
        self.environment_specification_type = None
        self.inference_container_properties = inference_container_properties
        self.is_anonymous = is_anonymous
        self.properties = properties
        self.tags = tags
+
+
class EnvironmentSpecificationVersionResource(Resource):
    """Azure Resource Manager resource envelope for an EnvironmentSpecificationVersion entity.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID of this resource, e.g.
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Resource type, e.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersion
    :ivar system_data: System data associated with the resource provider.
    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
    """

    # Server-populated fields are read-only; ``properties`` is the only
    # client-supplied (and required) field.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentSpecificationVersion'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, *, properties: "EnvironmentSpecificationVersion", **kwargs) -> None:
        super().__init__(**kwargs)
        self.properties = properties
        # Populated by the service on responses only.
        self.system_data = None
+
+
class EnvironmentSpecificationVersionResourceArmPaginatedResult(msrest.serialization.Model):
    """One page of a paginated list of EnvironmentSpecificationVersion entities.

    :param next_link: Link to the next page of EnvironmentSpecificationVersion
     objects; None when there are no further pages.
    :type next_link: str
    :param value: The EnvironmentSpecificationVersion objects contained in this
     page.
    :type value:
     list[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource]
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[EnvironmentSpecificationVersionResource]'},
    }

    def __init__(
        self,
        *,
        next_link: Optional[str] = None,
        value: Optional[List["EnvironmentSpecificationVersionResource"]] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
+
+
class ErrorAdditionalInfo(msrest.serialization.Model):
    """Additional info attached to a resource management error.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: object
    """

    # Both fields are server-populated.
    _validation = {
        'type': {'readonly': True},
        'info': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'info': {'key': 'info', 'type': 'object'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Populated by the service on responses only.
        self.type = None
        self.info = None
+
+
class ErrorDetail(msrest.serialization.Model):
    """A single error detail, possibly nested.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: The error code.
    :vartype code: str
    :ivar message: The error message.
    :vartype message: str
    :ivar target: The error target.
    :vartype target: str
    :ivar details: Nested error details.
    :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~azure_machine_learning_workspaces.models.ErrorAdditionalInfo]
    """

    # Every field is server-populated.
    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
        'details': {'readonly': True},
        'additional_info': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[ErrorDetail]'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Populated by the service on responses only.
        self.code = None
        self.message = None
        self.target = None
        self.details = None
        self.additional_info = None
+
+
class ErrorResponse(msrest.serialization.Model):
    """Common error response for all Azure Resource Manager APIs, used to return
    error details for failed operations. (This also follows the OData error
    response format.).

    :param error: The error object.
    :type error: ~azure_machine_learning_workspaces.models.ErrorDetail
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'error': {'key': 'error', 'type': 'ErrorDetail'},
    }

    def __init__(self, *, error: Optional["ErrorDetail"] = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.error = error
+
+
class EstimatedVmPrice(msrest.serialization.Model):
    """Estimated price for using a VM of a particular OS type, tier, etc.

    All required parameters must be populated in order to send to Azure.

    :param retail_price: Required. The price charged for using the VM.
    :type retail_price: float
    :param os_type: Required. Operating system type used by the VM. Possible values
     include: "Linux", "Windows".
    :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
    :param vm_tier: Required. The type of the VM. Possible values include:
     "Standard", "LowPriority", "Spot".
    :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
    """

    # Every field is mandatory on this model.
    _validation = {
        'retail_price': {'required': True},
        'os_type': {'required': True},
        'vm_tier': {'required': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'retail_price': {'key': 'retailPrice', 'type': 'float'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'vm_tier': {'key': 'vmTier', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        retail_price: float,
        os_type: Union[str, "VmPriceOsType"],
        vm_tier: Union[str, "VmTier"],
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.retail_price = retail_price
        self.os_type = os_type
        self.vm_tier = vm_tier
+
+
class EstimatedVmPrices(msrest.serialization.Model):
    """Estimated price information for using a VM.

    All required parameters must be populated in order to send to Azure.

    :param billing_currency: Required. Three lettered code specifying the currency
     of the VM price. Example: USD. Possible values include: "USD".
    :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
    :param unit_of_measure: Required. The unit of time measurement for the
     specified VM price. Example: OneHour. Possible values include: "OneHour".
    :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
    :param values: Required. List of estimated prices for using a VM of a
     particular OS type, tier, etc.
    :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
    """

    # Every field is mandatory on this model.
    _validation = {
        'billing_currency': {'required': True},
        'unit_of_measure': {'required': True},
        'values': {'required': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
        'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
        'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
    }

    def __init__(
        self,
        *,
        billing_currency: Union[str, "BillingCurrency"],
        unit_of_measure: Union[str, "UnitOfMeasure"],
        values: List["EstimatedVmPrice"],
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.billing_currency = billing_currency
        self.unit_of_measure = unit_of_measure
        self.values = values
+
+
class FlavorData(msrest.serialization.Model):
    """Model flavor-specific data container.

    :param data: Model flavor-specific data.
    :type data: dict[str, str]
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'data': {'key': 'data', 'type': '{str}'},
    }

    def __init__(self, *, data: Optional[Dict[str, str]] = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.data = data
+
+
class GlusterFsContents(DatastoreContents):
    """GlusterFs datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Storage type backing the datastore. Constant
     filled by server. Possible values include: "AzureBlob", "AzureDataLakeGen1",
     "AzureDataLakeGen2", "AzureFile", "AzureMySql", "AzurePostgreSql",
     "AzureSqlDatabase", "GlusterFs".
    :type contents_type: str or ~azure_machine_learning_workspaces.models.ContentsType
    :param server_address: Required. GlusterFS server address (can be the IP
     address or server name).
    :type server_address: str
    :param volume_name: Required. GlusterFS volume name.
    :type volume_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'server_address': {'key': 'serverAddress', 'type': 'str'},
        'volume_name': {'key': 'volumeName', 'type': 'str'},
    }

    def __init__(self, *, server_address: str, volume_name: str, **kwargs) -> None:
        super().__init__(**kwargs)
        # Fixed discriminator value for this concrete type.
        self.contents_type = 'GlusterFs'  # type: str
        self.server_address = server_address
        self.volume_name = volume_name
+
+
class HdInsight(Compute):
    """An HDInsight compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Constant filled by server.
     Possible values include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory",
     "VirtualMachine", "HDInsight", "Databricks", "DataLakeAnalytics",
     "SynapseSpark".
    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: Provision state of the cluster. Valid values are
     Unknown, Updating, Provisioning, Succeeded, and Failed. Possible values
     include: "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed",
     "Canceled".
    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: Time at which the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: Time at which the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors encountered during provisioning.
    :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
    :ivar is_attached_compute: True when the compute was provisioned by the user
     and brought from outside; false when the machine learning service provisioned
     it.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure customers
     can use only MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties:
    :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
    """

    # Provisioning/lifecycle metadata is server-populated.
    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        disable_local_auth: Optional[bool] = None,
        properties: Optional["HdInsightProperties"] = None,
        **kwargs
    ) -> None:
        # Shared compute fields are handled by the Compute base class.
        super().__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
        # Fixed discriminator value for this concrete type.
        self.compute_type = 'HDInsight'  # type: str
        self.properties = properties
+
+
class HdInsightProperties(msrest.serialization.Model):
    """Properties of an HDInsight compute cluster.

    :param ssh_port: Port open for ssh connections on the master node of the
     cluster.
    :type ssh_port: int
    :param address: Public IP address of the master node of the cluster.
    :type address: str
    :param administrator_account: Admin credentials for the master node of the
     cluster.
    :type administrator_account:
     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
    """

    # Maps python attribute names to JSON wire names and msrest types.
    _attribute_map = {
        'ssh_port': {'key': 'sshPort', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
    }

    def __init__(
        self,
        *,
        ssh_port: Optional[int] = None,
        address: Optional[str] = None,
        administrator_account: Optional["VirtualMachineSshCredentials"] = None,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.ssh_port = ssh_port
        self.address = address
        self.administrator_account = administrator_account
+
+
+class IdAssetReference(AssetReferenceBase):
+    """Reference to an asset via its ARM resource ID.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+     server. Possible values include: "Id", "DataPath", "OutputPath".
+    :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+    :param asset_id: Required. ARM resource ID of the asset.
+    :type asset_id: str
+    """
+
+    # msrest validation rules; the 'pattern' regex is applied to asset_id on
+    # serialization (NOTE(review): as generated it only requires one matching
+    # character anywhere in the string — confirm against the service spec).
+    _validation = {
+        'reference_type': {'required': True},
+        'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+        'asset_id': {'key': 'assetId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        asset_id: str,
+        **kwargs
+    ):
+        super(IdAssetReference, self).__init__(**kwargs)
+        # Polymorphic discriminator: constant for this subclass.
+        self.reference_type = 'Id'  # type: str
+        self.asset_id = asset_id
+
+
+class Identity(msrest.serialization.Model):
+    """Identity for the resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar principal_id: The principal ID of resource identity.
+    :vartype principal_id: str
+    :ivar tenant_id: The tenant ID of resource.
+    :vartype tenant_id: str
+    :param type: The identity type. Possible values include: "SystemAssigned",
+     "SystemAssigned,UserAssigned", "UserAssigned", "None".
+    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+    :param user_assigned_identities: The user assigned identities associated with the resource.
+    :type user_assigned_identities: dict[str,
+     ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+    """
+
+    # 'readonly' attributes are populated by the server and never sent.
+    _validation = {
+        'principal_id': {'readonly': True},
+        'tenant_id': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[Union[str, "ResourceIdentityType"]] = None,
+        user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None,
+        **kwargs
+    ):
+        super(Identity, self).__init__(**kwargs)
+        # Read-only; filled in from the service response during deserialization.
+        self.principal_id = None
+        self.tenant_id = None
+        self.type = type
+        self.user_assigned_identities = user_assigned_identities
+
+
+class IdentityForCmk(msrest.serialization.Model):
+    """Identity that will be used to access key vault for encryption at rest.
+
+    :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
+     access the customer managed key vault.
+    :type user_assigned_identity: str
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        user_assigned_identity: Optional[str] = None,
+        **kwargs
+    ):
+        super(IdentityForCmk, self).__init__(**kwargs)
+        self.user_assigned_identity = user_assigned_identity
+
+
+class InferenceContainerProperties(msrest.serialization.Model):
+    """InferenceContainerProperties.
+
+    Route configuration (liveness/readiness/scoring) for an inference server container.
+
+    :param liveness_route: The route to check the liveness of the inference server container.
+    :type liveness_route: ~azure_machine_learning_workspaces.models.Route
+    :param readiness_route: The route to check the readiness of the inference server container.
+    :type readiness_route: ~azure_machine_learning_workspaces.models.Route
+    :param scoring_route: The port to send the scoring requests to, within the inference server
+     container.
+    :type scoring_route: ~azure_machine_learning_workspaces.models.Route
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'liveness_route': {'key': 'livenessRoute', 'type': 'Route'},
+        'readiness_route': {'key': 'readinessRoute', 'type': 'Route'},
+        'scoring_route': {'key': 'scoringRoute', 'type': 'Route'},
+    }
+
+    def __init__(
+        self,
+        *,
+        liveness_route: Optional["Route"] = None,
+        readiness_route: Optional["Route"] = None,
+        scoring_route: Optional["Route"] = None,
+        **kwargs
+    ):
+        super(InferenceContainerProperties, self).__init__(**kwargs)
+        self.liveness_route = liveness_route
+        self.readiness_route = readiness_route
+        self.scoring_route = scoring_route
+
+
+class InputDataBinding(msrest.serialization.Model):
+    """InputDataBinding.
+
+    Describes how a registered data version is bound into a job's container process.
+
+    :param data_id: ARM resource ID of the registered dataVersion.
+    :type data_id: str
+    :param mode: Mechanism for accessing the data artifact. Possible values include: "Mount",
+     "Download", "Upload".
+    :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
+    :param path_on_compute: Location of data inside the container process.
+    :type path_on_compute: str
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'data_id': {'key': 'dataId', 'type': 'str'},
+        'mode': {'key': 'mode', 'type': 'str'},
+        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_id: Optional[str] = None,
+        mode: Optional[Union[str, "DataBindingMode"]] = None,
+        path_on_compute: Optional[str] = None,
+        **kwargs
+    ):
+        super(InputDataBinding, self).__init__(**kwargs)
+        self.data_id = data_id
+        self.mode = mode
+        self.path_on_compute = path_on_compute
+
+
+class JobBaseResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Wraps a JobBase payload in the standard ARM resource fields (id/name/type/systemData).
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.JobBase
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'readonly' attributes are populated by the server and never sent.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'JobBase'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: "JobBase",
+        **kwargs
+    ):
+        super(JobBaseResource, self).__init__(**kwargs)
+        self.properties = properties
+        # Read-only; filled in from the service response during deserialization.
+        self.system_data = None
+
+
+class JobBaseResourceArmPaginatedResult(msrest.serialization.Model):
+    """A paginated list of JobBase entities.
+
+    :param next_link: The link to the next page of JobBase objects. If null, there are no
+     additional pages.
+    :type next_link: str
+    :param value: An array of objects of type JobBase.
+    :type value: list[~azure_machine_learning_workspaces.models.JobBaseResource]
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[JobBaseResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        next_link: Optional[str] = None,
+        value: Optional[List["JobBaseResource"]] = None,
+        **kwargs
+    ):
+        super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs)
+        self.next_link = next_link
+        self.value = value
+
+
+class JobEndpoint(msrest.serialization.Model):
+    """Job endpoint definition.
+
+    :param endpoint: Url for endpoint.
+    :type endpoint: str
+    :param job_endpoint_type: Endpoint type.
+    :type job_endpoint_type: str
+    :param port: Port for endpoint.
+    :type port: int
+    :param properties: Additional properties to set on the endpoint.
+    :type properties: dict[str, str]
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'endpoint': {'key': 'endpoint', 'type': 'str'},
+        'job_endpoint_type': {'key': 'jobEndpointType', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        endpoint: Optional[str] = None,
+        job_endpoint_type: Optional[str] = None,
+        port: Optional[int] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(JobEndpoint, self).__init__(**kwargs)
+        self.endpoint = endpoint
+        self.job_endpoint_type = job_endpoint_type
+        self.port = port
+        self.properties = properties
+
+
+class JobOutput(msrest.serialization.Model):
+    """Job output definition container information on where to find job output/logs.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar datastore_id: ARM ID of the datastore where the job logs and artifacts are stored, or
+     null for the default container ("azureml") in the workspace's storage account.
+    :vartype datastore_id: str
+    :ivar path: Path within the datastore to the job logs and artifacts.
+    :vartype path: str
+    """
+
+    # Every attribute is read-only: this model is only ever deserialized from
+    # a service response, never constructed with user data.
+    _validation = {
+        'datastore_id': {'readonly': True},
+        'path': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JobOutput, self).__init__(**kwargs)
+        # Read-only; filled in from the service response during deserialization.
+        self.datastore_id = None
+        self.path = None
+
+
+class OnlineDeployment(msrest.serialization.Model):
+    """OnlineDeployment.
+
+    Base (polymorphic) model for an online endpoint deployment.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: K8SOnlineDeployment, ManagedOnlineDeployment.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param app_insights_enabled: If true, enables Application Insights logging.
+    :type app_insights_enabled: bool
+    :param code_configuration: Code configuration for the endpoint deployment.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param description: Description of the endpoint deployment.
+    :type description: str
+    :param endpoint_compute_type: Required. The compute type of the endpoint.Constant filled by
+     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+    :type endpoint_compute_type: str or
+     ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param environment_id: ARM resource ID of the environment specification for the endpoint
+     deployment.
+    :type environment_id: str
+    :param environment_variables: Environment variables configuration for the deployment.
+    :type environment_variables: dict[str, str]
+    :param liveness_probe: Deployment container liveness/readiness probe configuration.
+    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    :param model: Reference to the model asset for the endpoint deployment.
+    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
+     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
+    :param request_settings: Online deployment scoring requests configuration.
+    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+    :param scale_settings: Online deployment scaling configuration.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+    """
+
+    # 'readonly' attributes are populated by the server and never sent.
+    _validation = {
+        'endpoint_compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+        'description': {'key': 'description', 'type': 'str'},
+        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+    }
+
+    # Discriminator value -> subclass name; msrest uses this for polymorphic
+    # deserialization based on the 'endpointComputeType' field.
+    _subtype_map = {
+        'endpoint_compute_type': {'K8S': 'K8SOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'}
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        code_configuration: Optional["CodeConfiguration"] = None,
+        description: Optional[str] = None,
+        environment_id: Optional[str] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        liveness_probe: Optional["ProbeSettings"] = None,
+        model: Optional["AssetReferenceBase"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        request_settings: Optional["OnlineRequestSettings"] = None,
+        scale_settings: Optional["OnlineScaleSettings"] = None,
+        **kwargs
+    ):
+        super(OnlineDeployment, self).__init__(**kwargs)
+        self.app_insights_enabled = app_insights_enabled
+        self.code_configuration = code_configuration
+        self.description = description
+        # Discriminator: left None on the base class; set by each subclass.
+        self.endpoint_compute_type = None  # type: Optional[str]
+        self.environment_id = environment_id
+        self.environment_variables = environment_variables
+        self.liveness_probe = liveness_probe
+        self.model = model
+        self.properties = properties
+        # Read-only; filled in from the service response during deserialization.
+        self.provisioning_state = None
+        self.request_settings = request_settings
+        self.scale_settings = scale_settings
+
+
+class K8SOnlineDeployment(OnlineDeployment):
+    """K8SOnlineDeployment.
+
+    Online deployment targeting a Kubernetes compute; adds per-container
+    resource requirements on top of the base OnlineDeployment fields.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param app_insights_enabled: If true, enables Application Insights logging.
+    :type app_insights_enabled: bool
+    :param code_configuration: Code configuration for the endpoint deployment.
+    :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+    :param description: Description of the endpoint deployment.
+    :type description: str
+    :param endpoint_compute_type: Required. The compute type of the endpoint.Constant filled by
+     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+    :type endpoint_compute_type: str or
+     ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param environment_id: ARM resource ID of the environment specification for the endpoint
+     deployment.
+    :type environment_id: str
+    :param environment_variables: Environment variables configuration for the deployment.
+    :type environment_variables: dict[str, str]
+    :param liveness_probe: Deployment container liveness/readiness probe configuration.
+    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    :param model: Reference to the model asset for the endpoint deployment.
+    :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+    :param properties: Property dictionary. Properties can be added, but not removed or altered.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
+     include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
+    :param request_settings: Online deployment scoring requests configuration.
+    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+    :param scale_settings: Online deployment scaling configuration.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+    :param container_resource_requirements: Resource requirements for each container instance
+     within an online deployment.
+    :type container_resource_requirements:
+     ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+    """
+
+    # 'readonly' attributes are populated by the server and never sent.
+    _validation = {
+        'endpoint_compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+        'description': {'key': 'description', 'type': 'str'},
+        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+        'environment_id': {'key': 'environmentId', 'type': 'str'},
+        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        code_configuration: Optional["CodeConfiguration"] = None,
+        description: Optional[str] = None,
+        environment_id: Optional[str] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        liveness_probe: Optional["ProbeSettings"] = None,
+        model: Optional["AssetReferenceBase"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        request_settings: Optional["OnlineRequestSettings"] = None,
+        scale_settings: Optional["OnlineScaleSettings"] = None,
+        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+        **kwargs
+    ):
+        super(K8SOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, liveness_probe=liveness_probe, model=model, properties=properties, request_settings=request_settings, scale_settings=scale_settings, **kwargs)
+        # Polymorphic discriminator: constant for this subclass.
+        self.endpoint_compute_type = 'K8S'  # type: str
+        self.container_resource_requirements = container_resource_requirements
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+    """KeyVaultProperties.
+
+    Customer-managed-key (CMK) encryption settings pointing at a Key Vault key.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+     encryption key is present.
+    :type key_vault_arm_id: str
+    :param key_identifier: Required. Key vault uri to access the encryption key.
+    :type key_identifier: str
+    :param identity_client_id: For future use - The client id of the identity which will be used to
+     access key vault.
+    :type identity_client_id: str
+    """
+
+    # msrest validation rules applied when serializing a request.
+    _validation = {
+        'key_vault_arm_id': {'required': True},
+        'key_identifier': {'required': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+        'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+        'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key_vault_arm_id: str,
+        key_identifier: str,
+        identity_client_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(KeyVaultProperties, self).__init__(**kwargs)
+        self.key_vault_arm_id = key_vault_arm_id
+        self.key_identifier = key_identifier
+        self.identity_client_id = identity_client_id
+
+
+class LabelCategory(msrest.serialization.Model):
+    """Label category definition.
+
+    :param allow_multi_select: Indicates whether it is allowed to select multiple classes in this
+     category.
+    :type allow_multi_select: bool
+    :param classes: Dictionary of label classes in this category.
+    :type classes: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+    :param display_name: Display name of the label category.
+    :type display_name: str
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'},
+        'classes': {'key': 'classes', 'type': '{LabelClass}'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        allow_multi_select: Optional[bool] = None,
+        classes: Optional[Dict[str, "LabelClass"]] = None,
+        display_name: Optional[str] = None,
+        **kwargs
+    ):
+        super(LabelCategory, self).__init__(**kwargs)
+        self.allow_multi_select = allow_multi_select
+        self.classes = classes
+        self.display_name = display_name
+
+
+class LabelClass(msrest.serialization.Model):
+    """Label class definition.
+
+    Note: subclasses map to LabelClass itself, so label hierarchies can nest
+    to arbitrary depth.
+
+    :param display_name: Display name of the label class.
+    :type display_name: str
+    :param subclasses: Dictionary of subclasses of the label class.
+    :type subclasses: dict[str, ~azure_machine_learning_workspaces.models.LabelClass]
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        display_name: Optional[str] = None,
+        subclasses: Optional[Dict[str, "LabelClass"]] = None,
+        **kwargs
+    ):
+        super(LabelClass, self).__init__(**kwargs)
+        self.display_name = display_name
+        self.subclasses = subclasses
+
+
+class LabelingDatasetConfiguration(msrest.serialization.Model):
+    """Labeling dataset configuration definition.
+
+    :param asset_name: Name of the data asset to perform labeling.
+    :type asset_name: str
+    :param dataset_version: AML dataset version.
+    :type dataset_version: str
+    :param incremental_dataset_refresh_enabled: Indicates whether to enable incremental dataset
+     refresh.
+    :type incremental_dataset_refresh_enabled: bool
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'asset_name': {'key': 'assetName', 'type': 'str'},
+        'dataset_version': {'key': 'datasetVersion', 'type': 'str'},
+        'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        asset_name: Optional[str] = None,
+        dataset_version: Optional[str] = None,
+        incremental_dataset_refresh_enabled: Optional[bool] = None,
+        **kwargs
+    ):
+        super(LabelingDatasetConfiguration, self).__init__(**kwargs)
+        self.asset_name = asset_name
+        self.dataset_version = dataset_version
+        self.incremental_dataset_refresh_enabled = incremental_dataset_refresh_enabled
+
+
+class LabelingJob(msrest.serialization.Model):
+    """Labeling job definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar created_time_utc: Created time of the job in UTC timezone.
+    :vartype created_time_utc: ~datetime.datetime
+    :param dataset_configuration: Configuration of dataset used in the job.
+    :type dataset_configuration:
+     ~azure_machine_learning_workspaces.models.LabelingDatasetConfiguration
+    :param description: The asset description text.
+    :type description: str
+    :ivar interaction_endpoints: List of JobEndpoints.
+     For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+    :vartype interaction_endpoints: dict[str,
+     ~azure_machine_learning_workspaces.models.JobEndpoint]
+    :param job_instructions: Labeling instructions of the job.
+    :type job_instructions: ~azure_machine_learning_workspaces.models.LabelingJobInstructions
+    :param job_type: Required. Specifies the type of job. This field should always be set to
+     "Labeling". Possible values include: "Command", "Sweep", "Labeling".
+    :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+    :param label_categories: Label categories of the job.
+    :type label_categories: dict[str, ~azure_machine_learning_workspaces.models.LabelCategory]
+    :param labeling_job_media_properties: Media type specific properties in the job.
+    :type labeling_job_media_properties:
+     ~azure_machine_learning_workspaces.models.LabelingJobMediaProperties
+    :param ml_assist_configuration: Configuration of MLAssist feature in the job.
+    :type ml_assist_configuration: ~azure_machine_learning_workspaces.models.MlAssistConfiguration
+    :ivar progress_metrics: Progress metrics of the job.
+    :vartype progress_metrics: ~azure_machine_learning_workspaces.models.ProgressMetrics
+    :ivar project_id: Internal id of the job(Previously called project).
+    :vartype project_id: str
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :ivar provisioning_state: Specifies the labeling job provisioning state. Possible values
+     include: "Succeeded", "Failed", "Canceled", "InProgress".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.JobProvisioningState
+    :ivar status: Status of the job. Possible values include: "NotStarted", "Starting",
+     "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+     "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
+    :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+    :ivar status_messages: Status messages of the job.
+    :vartype status_messages: list[~azure_machine_learning_workspaces.models.StatusMessage]
+    :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    """
+
+    # 'readonly' attributes are populated by the server and never sent;
+    # job_type is the only field required on requests.
+    _validation = {
+        'created_time_utc': {'readonly': True},
+        'interaction_endpoints': {'readonly': True},
+        'job_type': {'required': True},
+        'progress_metrics': {'readonly': True},
+        'project_id': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+        'status': {'readonly': True},
+        'status_messages': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
+        'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'},
+        'description': {'key': 'description', 'type': 'str'},
+        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+        'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'},
+        'job_type': {'key': 'jobType', 'type': 'str'},
+        'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'},
+        'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'},
+        'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MlAssistConfiguration'},
+        'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'},
+        'project_id': {'key': 'projectId', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        job_type: Union[str, "JobType"],
+        dataset_configuration: Optional["LabelingDatasetConfiguration"] = None,
+        description: Optional[str] = None,
+        job_instructions: Optional["LabelingJobInstructions"] = None,
+        label_categories: Optional[Dict[str, "LabelCategory"]] = None,
+        labeling_job_media_properties: Optional["LabelingJobMediaProperties"] = None,
+        ml_assist_configuration: Optional["MlAssistConfiguration"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(LabelingJob, self).__init__(**kwargs)
+        # Attributes initialized to None below are read-only and are filled in
+        # from the service response during deserialization.
+        self.created_time_utc = None
+        self.dataset_configuration = dataset_configuration
+        self.description = description
+        self.interaction_endpoints = None
+        self.job_instructions = job_instructions
+        self.job_type = job_type
+        self.label_categories = label_categories
+        self.labeling_job_media_properties = labeling_job_media_properties
+        self.ml_assist_configuration = ml_assist_configuration
+        self.progress_metrics = None
+        self.project_id = None
+        self.properties = properties
+        self.provisioning_state = None
+        self.status = None
+        self.status_messages = None
+        self.tags = tags
+
+
+class LabelingJobMediaProperties(msrest.serialization.Model):
+    """Properties of a labeling job.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server. Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    """
+
+    # The discriminator itself is the only (required) field on the base class.
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+    }
+
+    # Discriminator value -> subclass name; msrest uses this for polymorphic
+    # deserialization based on the 'mediaType' field.
+    _subtype_map = {
+        'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LabelingJobMediaProperties, self).__init__(**kwargs)
+        # Discriminator: left None on the base class; set by each subclass.
+        self.media_type = None  # type: Optional[str]
+
+
+class LabelingJobImageProperties(LabelingJobMediaProperties):
+    """Properties of a labeling job for image data.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param media_type: Required. Media type of the job.Constant filled by server. Possible values
+     include: "Image", "Text".
+    :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+    :param annotation_type: Annotation type of image labeling job. Possible values include:
+     "Classification", "BoundingBox", "InstanceSegmentation".
+    :type annotation_type: str or ~azure_machine_learning_workspaces.models.ImageAnnotationType
+    """
+
+    _validation = {
+        'media_type': {'required': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'media_type': {'key': 'mediaType', 'type': 'str'},
+        'annotation_type': {'key': 'annotationType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        annotation_type: Optional[Union[str, "ImageAnnotationType"]] = None,
+        **kwargs
+    ):
+        super(LabelingJobImageProperties, self).__init__(**kwargs)
+        # Polymorphic discriminator: constant for this subclass.
+        self.media_type = 'Image'  # type: str
+        self.annotation_type = annotation_type
+
+
+class LabelingJobInstructions(msrest.serialization.Model):
+    """Instructions for labeling job.
+
+    :param uri: The link to a page with detailed labeling instructions for labelers.
+    :type uri: str
+    """
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'uri': {'key': 'uri', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        uri: Optional[str] = None,
+        **kwargs
+    ):
+        super(LabelingJobInstructions, self).__init__(**kwargs)
+        self.uri = uri
+
+
+class LabelingJobResource(Resource):
+    """Azure Resource Manager resource envelope.
+
+    Wraps a LabelingJob payload in the standard ARM resource fields (id/name/type/systemData).
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param properties: Required. Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.LabelingJob
+    :ivar system_data: System data associated with resource provider.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    """
+
+    # 'readonly' attributes are populated by the server and never sent.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'properties': {'required': True},
+        'system_data': {'readonly': True},
+    }
+
+    # Attribute name -> JSON key / msrest type for (de)serialization.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'LabelingJob'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: "LabelingJob",
+        **kwargs
+    ):
+        super(LabelingJobResource, self).__init__(**kwargs)
+        self.properties = properties
+        # Read-only; filled in from the service response during deserialization.
+        self.system_data = None
+
+
+class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of LabelingJob entities.
+
+ :param next_link: The link to the next page of LabelingJob objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type LabelingJob.
+ :type value: list[~azure_machine_learning_workspaces.models.LabelingJobResource]
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[LabelingJobResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ next_link: Optional[str] = None,
+ value: Optional[List["LabelingJobResource"]] = None,
+ **kwargs
+ ):
+ super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class LabelingJobTextProperties(LabelingJobMediaProperties):
+ """Properties of a labeling job for text data.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param media_type: Required. Media type of the job. Constant filled by server. Possible values
+ include: "Image", "Text".
+ :type media_type: str or ~azure_machine_learning_workspaces.models.MediaType
+ :param annotation_type: Annotation type of text labeling job. Possible values include:
+ "Classification".
+ :type annotation_type: str or ~azure_machine_learning_workspaces.models.TextAnnotationType
+ """
+
+ _validation = {  # 'media_type' is the polymorphic discriminator, fixed to 'Text' below
+ 'media_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'media_type': {'key': 'mediaType', 'type': 'str'},
+ 'annotation_type': {'key': 'annotationType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ annotation_type: Optional[Union[str, "TextAnnotationType"]] = None,
+ **kwargs
+ ):
+ super(LabelingJobTextProperties, self).__init__(**kwargs)
+ self.media_type = 'Text'  # type: str
+ self.annotation_type = annotation_type
+
+
+class LinkedInfo(msrest.serialization.Model):
+ """Information about a datastore origin, if linked.
+
+ :param linked_id: Linked service ID.
+ :type linked_id: str
+ :param linked_resource_name: Linked service resource name.
+ :type linked_resource_name: str
+ :param origin: Type of the linked service. Possible values include: "Synapse".
+ :type origin: str or ~azure_machine_learning_workspaces.models.OriginType
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'linked_id': {'key': 'linkedId', 'type': 'str'},
+ 'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'},
+ 'origin': {'key': 'origin', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ linked_id: Optional[str] = None,
+ linked_resource_name: Optional[str] = None,
+ origin: Optional[Union[str, "OriginType"]] = None,
+ **kwargs
+ ):
+ super(LinkedInfo, self).__init__(**kwargs)
+ self.linked_id = linked_id
+ self.linked_resource_name = linked_resource_name
+ self.origin = origin
+
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None  # readonly; populated by the server on deserialization
+ self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None  # readonly; populated by the server on deserialization
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None  # readonly; populated by the server on deserialization
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None  # readonly; populated by the server on deserialization
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {  # all fields are server-populated; nothing is sent on requests
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None  # readonly; populated by the server on deserialization
+ self.next_link = None
+
+
+class ManagedIdentity(IdentityConfiguration):
+ """Managed identity configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param identity_type: Required. Specifies the type of identity framework. Constant filled by
+ server. Possible values include: "Managed", "AMLToken".
+ :type identity_type: str or ~azure_machine_learning_workspaces.models.IdentityConfigurationType
+ :param client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not
+ set this field.
+ :type client_id: str
+ :param object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not
+ set this field.
+ :type object_id: str
+ :param resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned,
+ do not set this field.
+ :type resource_id: str
+ """
+
+ _validation = {  # 'identity_type' is the polymorphic discriminator, fixed to 'Managed' below
+ 'identity_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'identity_type': {'key': 'identityType', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: Optional[str] = None,
+ object_id: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ManagedIdentity, self).__init__(**kwargs)
+ self.identity_type = 'Managed'  # type: str
+ self.client_id = client_id
+ self.object_id = object_id
+ self.resource_id = resource_id
+
+
+class ManagedOnlineDeployment(OnlineDeployment):
+ """ManagedOnlineDeployment.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param app_insights_enabled: If true, enables Application Insights logging.
+ :type app_insights_enabled: bool
+ :param code_configuration: Code configuration for the endpoint deployment.
+ :type code_configuration: ~azure_machine_learning_workspaces.models.CodeConfiguration
+ :param description: Description of the endpoint deployment.
+ :type description: str
+ :param endpoint_compute_type: Required. The compute type of the endpoint. Constant filled by
+ server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+ :type endpoint_compute_type: str or
+ ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param environment_id: ARM resource ID of the environment specification for the endpoint
+ deployment.
+ :type environment_id: str
+ :param environment_variables: Environment variables configuration for the deployment.
+ :type environment_variables: dict[str, str]
+ :param liveness_probe: Deployment container liveness/readiness probe configuration.
+ :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+ :param model: Reference to the model asset for the endpoint deployment.
+ :type model: ~azure_machine_learning_workspaces.models.AssetReferenceBase
+ :param properties: Property dictionary. Properties can be added, but not removed or altered.
+ :type properties: dict[str, str]
+ :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values
+ include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.DeploymentProvisioningState
+ :param request_settings: Online deployment scoring requests configuration.
+ :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+ :param scale_settings: Online deployment scaling configuration.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+ :param instance_type: Compute instance type.
+ :type instance_type: str
+ :param readiness_probe: Deployment container liveness/readiness probe configuration.
+ :type readiness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+ """
+
+ _validation = {  # 'endpoint_compute_type' is the polymorphic discriminator, fixed to 'Managed' below
+ 'endpoint_compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+ 'environment_id': {'key': 'environmentId', 'type': 'str'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+ 'model': {'key': 'model', 'type': 'AssetReferenceBase'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+ 'instance_type': {'key': 'instanceType', 'type': 'str'},
+ 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ app_insights_enabled: Optional[bool] = None,
+ code_configuration: Optional["CodeConfiguration"] = None,
+ description: Optional[str] = None,
+ environment_id: Optional[str] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ liveness_probe: Optional["ProbeSettings"] = None,
+ model: Optional["AssetReferenceBase"] = None,
+ properties: Optional[Dict[str, str]] = None,
+ request_settings: Optional["OnlineRequestSettings"] = None,
+ scale_settings: Optional["OnlineScaleSettings"] = None,
+ instance_type: Optional[str] = None,
+ readiness_probe: Optional["ProbeSettings"] = None,
+ **kwargs
+ ):
+ super(ManagedOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, liveness_probe=liveness_probe, model=model, properties=properties, request_settings=request_settings, scale_settings=scale_settings, **kwargs)
+ self.endpoint_compute_type = 'Managed'  # type: str
+ self.instance_type = instance_type
+ self.readiness_probe = readiness_probe
+
+
+class ManualScaleSettings(OnlineScaleSettings):
+ """ManualScaleSettings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_instances: Maximum number of instances for this deployment.
+ :type max_instances: int
+ :param min_instances: Minimum number of instances for this deployment.
+ :type min_instances: int
+ :param scale_type: Required. Type of deployment scaling algorithm. Constant filled by server.
+ Possible values include: "Auto", "Manual".
+ :type scale_type: str or ~azure_machine_learning_workspaces.models.ScaleType
+ :param instance_count: Fixed number of instances for this deployment.
+ :type instance_count: int
+ """
+
+ _validation = {  # 'scale_type' is the polymorphic discriminator, fixed to 'Manual' below
+ 'scale_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_instances': {'key': 'maxInstances', 'type': 'int'},
+ 'min_instances': {'key': 'minInstances', 'type': 'int'},
+ 'scale_type': {'key': 'scaleType', 'type': 'str'},
+ 'instance_count': {'key': 'instanceCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_instances: Optional[int] = None,
+ min_instances: Optional[int] = None,
+ instance_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(ManualScaleSettings, self).__init__(max_instances=max_instances, min_instances=min_instances, **kwargs)
+ self.scale_type = 'Manual'  # type: str
+ self.instance_count = instance_count
+
+
+class MedianStoppingPolicy(EarlyTerminationPolicy):
+ """Defines an early termination policy based on running averages of the primary metric of all runs.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param delay_evaluation: Number of intervals by which to delay the first evaluation.
+ :type delay_evaluation: int
+ :param evaluation_interval: Interval (number of runs) between policy evaluations.
+ :type evaluation_interval: int
+ :param policy_type: Required. Name of policy configuration. Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ """
+
+ _validation = {  # 'policy_type' is the polymorphic discriminator, fixed to 'MedianStopping' below
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_evaluation: Optional[int] = None,
+ evaluation_interval: Optional[int] = None,
+ **kwargs
+ ):
+ super(MedianStoppingPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs)
+ self.policy_type = 'MedianStopping'  # type: str
+
+
+class MlAssistConfiguration(msrest.serialization.Model):
+ """Labeling MLAssist configuration definition.
+
+ :param inferencing_compute_binding: AML compute binding used in inferencing.
+ :type inferencing_compute_binding:
+ ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ :param ml_assist_enabled: Indicates whether MLAssist feature is enabled.
+ :type ml_assist_enabled: bool
+ :param training_compute_binding: AML compute binding used in training.
+ :type training_compute_binding: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeConfiguration'},
+ 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'},
+ 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeConfiguration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ inferencing_compute_binding: Optional["ComputeConfiguration"] = None,
+ ml_assist_enabled: Optional[bool] = None,
+ training_compute_binding: Optional["ComputeConfiguration"] = None,
+ **kwargs
+ ):
+ super(MlAssistConfiguration, self).__init__(**kwargs)
+ self.inferencing_compute_binding = inferencing_compute_binding
+ self.ml_assist_enabled = ml_assist_enabled
+ self.training_compute_binding = training_compute_binding
+
+
+class ModelContainer(msrest.serialization.Model):
+ """ModelContainer.
+
+ :param description: The asset description text.
+ :type description: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ tags: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(ModelContainer, self).__init__(**kwargs)
+ self.description = description
+ self.properties = properties
+ self.tags = tags
+
+
+class ModelContainerResource(Resource):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.ModelContainer
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {  # 'properties' is caller-supplied; id/name/type/system_data are server-populated
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'ModelContainer'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: "ModelContainer",
+ **kwargs
+ ):
+ super(ModelContainerResource, self).__init__(**kwargs)
+ self.properties = properties
+ self.system_data = None  # readonly; filled in by the server on deserialization
+
+
+class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelContainer entities.
+
+ :param next_link: The link to the next page of ModelContainer objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type ModelContainer.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelContainerResource]
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[ModelContainerResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ next_link: Optional[str] = None,
+ value: Optional[List["ModelContainerResource"]] = None,
+ **kwargs
+ ):
+ super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class ModelVersion(msrest.serialization.Model):
+ """Model asset version details.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param datastore_id: ARM resource ID of the datastore where the asset is located.
+ :type datastore_id: str
+ :param description: The asset description text.
+ :type description: str
+ :param flavors: Mapping of model flavors to their properties.
+ :type flavors: dict[str, ~azure_machine_learning_workspaces.models.FlavorData]
+ :param is_anonymous: If the name version are system generated (anonymous registration).
+ :type is_anonymous: bool
+ :param path: Required. The path of the file/directory in the datastore.
+ :type path: str
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ """
+
+ _validation = {
+ 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},  # NOTE(review): unanchored pattern only requires one word char somewhere — presumably from the service swagger
+ }
+
+ _attribute_map = {
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'flavors': {'key': 'flavors', 'type': '{FlavorData}'},
+ 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
+ 'path': {'key': 'path', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ path: str,
+ datastore_id: Optional[str] = None,
+ description: Optional[str] = None,
+ flavors: Optional[Dict[str, "FlavorData"]] = None,
+ is_anonymous: Optional[bool] = None,
+ properties: Optional[Dict[str, str]] = None,
+ tags: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(ModelVersion, self).__init__(**kwargs)
+ self.datastore_id = datastore_id
+ self.description = description
+ self.flavors = flavors
+ self.is_anonymous = is_anonymous
+ self.path = path
+ self.properties = properties
+ self.tags = tags
+
+
+class ModelVersionResource(Resource):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.ModelVersion
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {  # 'properties' is caller-supplied; id/name/type/system_data are server-populated
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'ModelVersion'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: "ModelVersion",
+ **kwargs
+ ):
+ super(ModelVersionResource, self).__init__(**kwargs)
+ self.properties = properties
+ self.system_data = None  # readonly; filled in by the server on deserialization
+
+
+class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of ModelVersion entities.
+
+ :param next_link: The link to the next page of ModelVersion objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type ModelVersion.
+ :type value: list[~azure_machine_learning_workspaces.models.ModelVersionResource]
+ """
+
+ _attribute_map = {  # python attribute -> REST wire key and msrest serializer type
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[ModelVersionResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ next_link: Optional[str] = None,
+ value: Optional[List["ModelVersionResource"]] = None,
+ **kwargs
+ ):
+ super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class Mpi(DistributionConfiguration):
+ """MPI distribution configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param distribution_type: Required. Specifies the type of distribution framework. Constant
+ filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+ :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+ :param process_count_per_instance: Number of processes per MPI node.
+ :type process_count_per_instance: int
+ """
+
+ _validation = {  # 'distribution_type' is the polymorphic discriminator, fixed to 'Mpi' below
+ 'distribution_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'distribution_type': {'key': 'distributionType', 'type': 'str'},
+ 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ process_count_per_instance: Optional[int] = None,
+ **kwargs
+ ):
+ super(Mpi, self).__init__(**kwargs)
+ self.distribution_type = 'Mpi'  # type: str
+ self.process_count_per_instance = process_count_per_instance
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {  # all counts are server-populated; never sent on requests
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NoneDatastoreCredentials(DatastoreCredentials):
+ """Empty/none datastore credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authentication with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ :param secrets: Empty/none datastore secret.
+ :type secrets: ~azure_machine_learning_workspaces.models.DatastoreSecrets
+ """
+
+ _validation = {  # 'credentials_type' is the polymorphic discriminator, fixed to 'None' below
+ 'credentials_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ 'secrets': {'key': 'secrets', 'type': 'DatastoreSecrets'},
+ }
+
+ def __init__(
+ self,
+ *,
+ secrets: Optional["DatastoreSecrets"] = None,
+ **kwargs
+ ):
+ super(NoneDatastoreCredentials, self).__init__(**kwargs)
+ self.credentials_type = 'None'  # type: str
+ self.secrets = secrets
+
+
+class NoneDatastoreSecrets(DatastoreSecrets):
+ """Empty/none datastore secret.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authentication with storage. Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ """
+
+ _validation = {  # 'secrets_type' is the polymorphic discriminator, fixed to 'None' below
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NoneDatastoreSecrets, self).__init__(**kwargs)
+ self.secrets_type = 'None'  # type: str
+
+
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error_message: Optional[str] = None,
+ status_code: Optional[int] = None,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = error_message
+ self.status_code = status_code
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+ :param resource_id: the data plane resourceId that used to initialize notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurs when preparing notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ fqdn: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ notebook_preparation_error: Optional["NotebookPreparationError"] = None,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = fqdn
+ self.resource_id = resource_id
+ self.notebook_preparation_error = notebook_preparation_error
+
+
+class Objective(msrest.serialization.Model):
+ """Optimization objective.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param goal: Required. Defines supported metric goals for hyperparameter tuning. Possible
+ values include: "Minimize", "Maximize".
+ :type goal: str or ~azure_machine_learning_workspaces.models.Goal
+ :param primary_metric: Required. Name of the metric to optimize.
+ :type primary_metric: str
+ """
+
+ _validation = {
+ 'goal': {'required': True},
+ 'primary_metric': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'goal': {'key': 'goal', 'type': 'str'},
+ 'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ goal: Union[str, "Goal"],
+ primary_metric: str,
+ **kwargs
+ ):
+ super(Objective, self).__init__(**kwargs)
+ self.goal = goal
+ self.primary_metric = primary_metric
+
+
+class OnlineDeploymentTrackedResource(TrackedResource):
+ """OnlineDeploymentTrackedResource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ :param location: Required. The geo-location where the resource lives.
+ :type location: str
+ :param identity: Service identity associated with a resource.
+ :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+ :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type.
+ :type kind: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.OnlineDeployment
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'location': {'required': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'OnlineDeployment'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ location: str,
+ properties: "OnlineDeployment",
+ tags: Optional[Dict[str, str]] = None,
+ identity: Optional["ResourceIdentity"] = None,
+ kind: Optional[str] = None,
+ **kwargs
+ ):
+ super(OnlineDeploymentTrackedResource, self).__init__(tags=tags, location=location, **kwargs)
+ self.identity = identity
+ self.kind = kind
+ self.properties = properties
+ self.system_data = None
+
+
+class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of OnlineDeployment entities.
+
+ :param next_link: The link to the next page of OnlineDeployment objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type OnlineDeployment.
+ :type value: list[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[OnlineDeploymentTrackedResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ next_link: Optional[str] = None,
+ value: Optional[List["OnlineDeploymentTrackedResource"]] = None,
+ **kwargs
+ ):
+ super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class OnlineEndpoint(msrest.serialization.Model):
+ """Online endpoint configuration.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param auth_mode: Required. Inference endpoint authentication mode type. Possible values
+ include: "AMLToken", "Key", "AADToken".
+ :type auth_mode: str or ~azure_machine_learning_workspaces.models.EndpointAuthMode
+ :param description: Description of the inference endpoint.
+ :type description: str
+ :param keys: EndpointAuthKeys to set initially on an Endpoint.
+ This property will always be returned as null. AuthKey values must be retrieved using the
+ ListKeys API.
+ :type keys: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+ :param properties: Property dictionary. Properties can be added, but not removed or altered.
+ :type properties: dict[str, str]
+ :ivar provisioning_state: State of endpoint provisioning. Possible values include: "Creating",
+ "Deleting", "Succeeded", "Failed", "Updating", "Canceled".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.EndpointProvisioningState
+ :ivar scoring_uri: Endpoint URI.
+ :vartype scoring_uri: str
+ :ivar swagger_uri: Endpoint Swagger URI.
+ :vartype swagger_uri: str
+ :param target: ARM resource ID of the compute if it exists.
+ optional.
+ :type target: str
+ :param traffic: Traffic rules on how the traffic will be routed across deployments.
+ :type traffic: dict[str, int]
+ """
+
+ _validation = {
+ 'auth_mode': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'scoring_uri': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'auth_mode': {'key': 'authMode', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'traffic': {'key': 'traffic', 'type': '{int}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ auth_mode: Union[str, "EndpointAuthMode"],
+ description: Optional[str] = None,
+ keys: Optional["EndpointAuthKeys"] = None,
+ properties: Optional[Dict[str, str]] = None,
+ target: Optional[str] = None,
+ traffic: Optional[Dict[str, int]] = None,
+ **kwargs
+ ):
+ super(OnlineEndpoint, self).__init__(**kwargs)
+ self.auth_mode = auth_mode
+ self.description = description
+ self.keys = keys
+ self.properties = properties
+ self.provisioning_state = None
+ self.scoring_uri = None
+ self.swagger_uri = None
+ self.target = target
+ self.traffic = traffic
+
+
+class OnlineEndpointTrackedResource(TrackedResource):
+ """OnlineEndpointTrackedResource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ :param location: Required. The geo-location where the resource lives.
+ :type location: str
+ :param identity: Service identity associated with a resource.
+ :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+ :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type.
+ :type kind: str
+ :param properties: Required. Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.OnlineEndpoint
+ :ivar system_data: System data associated with resource provider.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'location': {'required': True},
+ 'properties': {'required': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'OnlineEndpoint'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ location: str,
+ properties: "OnlineEndpoint",
+ tags: Optional[Dict[str, str]] = None,
+ identity: Optional["ResourceIdentity"] = None,
+ kind: Optional[str] = None,
+ **kwargs
+ ):
+ super(OnlineEndpointTrackedResource, self).__init__(tags=tags, location=location, **kwargs)
+ self.identity = identity
+ self.kind = kind
+ self.properties = properties
+ self.system_data = None
+
+
+class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model):
+ """A paginated list of OnlineEndpoint entities.
+
+ :param next_link: The link to the next page of OnlineEndpoint objects. If null, there are no
+ additional pages.
+ :type next_link: str
+ :param value: An array of objects of type OnlineEndpoint.
+ :type value: list[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[OnlineEndpointTrackedResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ next_link: Optional[str] = None,
+ value: Optional[List["OnlineEndpointTrackedResource"]] = None,
+ **kwargs
+ ):
+ super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class OnlineRequestSettings(msrest.serialization.Model):
+ """Online deployment scoring requests configuration.
+
+ :param max_concurrent_requests_per_instance: The number of requests allowed to queue at once
+ for this deployment.
+ :type max_concurrent_requests_per_instance: int
+ :param max_queue_wait: The maximum queue wait time in ISO 8601 format. Supports millisecond
+ precision.
+ :type max_queue_wait: ~datetime.timedelta
+ :param request_timeout: The request timeout in ISO 8601 format. Supports millisecond precision.
+ :type request_timeout: ~datetime.timedelta
+ """
+
+ _attribute_map = {
+ 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'},
+ 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_concurrent_requests_per_instance: Optional[int] = None,
+ max_queue_wait: Optional[datetime.timedelta] = None,
+ request_timeout: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(OnlineRequestSettings, self).__init__(**kwargs)
+ self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance
+ self.max_queue_wait = max_queue_wait
+ self.request_timeout = request_timeout
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ display: Optional["OperationDisplay"] = None,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = name
+ self.display = display
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ provider: Optional[str] = None,
+ resource: Optional[str] = None,
+ operation: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = provider
+ self.resource = resource
+ self.operation = operation
+ self.description = description
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Operation"]] = None,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class OutputDataBinding(msrest.serialization.Model):
+ """OutputDataBinding.
+
+ :param datastore_id: ARM resource ID of the datastore where the data output will be stored.
+ :type datastore_id: str
+ :param mode: Mechanism for data movement to datastore. Possible values include: "Mount",
+ "Download", "Upload".
+ :type mode: str or ~azure_machine_learning_workspaces.models.DataBindingMode
+ :param path_on_compute: Location of data inside the container process.
+ :type path_on_compute: str
+ :param path_on_datastore: Path within the datastore to the data.
+ :type path_on_datastore: str
+ """
+
+ _attribute_map = {
+ 'datastore_id': {'key': 'datastoreId', 'type': 'str'},
+ 'mode': {'key': 'mode', 'type': 'str'},
+ 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
+ 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ datastore_id: Optional[str] = None,
+ mode: Optional[Union[str, "DataBindingMode"]] = None,
+ path_on_compute: Optional[str] = None,
+ path_on_datastore: Optional[str] = None,
+ **kwargs
+ ):
+ super(OutputDataBinding, self).__init__(**kwargs)
+ self.datastore_id = datastore_id
+ self.mode = mode
+ self.path_on_compute = path_on_compute
+ self.path_on_datastore = path_on_datastore
+
+
+class OutputPathAssetReference(AssetReferenceBase):
+ """Reference to an asset via its path in a job output.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param reference_type: Required. Specifies the type of asset reference.Constant filled by
+ server. Possible values include: "Id", "DataPath", "OutputPath".
+ :type reference_type: str or ~azure_machine_learning_workspaces.models.ReferenceType
+ :param job_id: ARM resource ID of the job.
+ :type job_id: str
+ :param path: The path of the file/directory in the job output.
+ :type path: str
+ """
+
+ _validation = {
+ 'reference_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ 'job_id': {'key': 'jobId', 'type': 'str'},
+ 'path': {'key': 'path', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ job_id: Optional[str] = None,
+ path: Optional[str] = None,
+ **kwargs
+ ):
+ super(OutputPathAssetReference, self).__init__(**kwargs)
+ self.reference_type = 'OutputPath' # type: str
+ self.job_id = job_id
+ self.path = path
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ComputeResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceConnection"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class PartialOnlineDeployment(msrest.serialization.Model):
+ """Mutable online deployment configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: PartialAksOnlineDeployment, PartialManagedOnlineDeployment.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
+ deployment.
+ :type app_insights_enabled: bool
+ :param endpoint_compute_type: Required. The compute type of the endpoint.Constant filled by
+ server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+ :type endpoint_compute_type: str or
+ ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param liveness_probe: Deployment container liveness/readiness probe configuration.
+ :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+ :param request_settings: Online deployment scoring requests configuration.
+ :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+ :param scale_settings: Online deployment scaling configuration.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+ """
+
+ _validation = {
+ 'endpoint_compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+ 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+ 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+ }
+
+ _subtype_map = {
+ 'endpoint_compute_type': {'K8S': 'PartialAksOnlineDeployment', 'Managed': 'PartialManagedOnlineDeployment'}
+ }
+
+ def __init__(
+ self,
+ *,
+ app_insights_enabled: Optional[bool] = None,
+ liveness_probe: Optional["ProbeSettings"] = None,
+ request_settings: Optional["OnlineRequestSettings"] = None,
+ scale_settings: Optional["OnlineScaleSettings"] = None,
+ **kwargs
+ ):
+ super(PartialOnlineDeployment, self).__init__(**kwargs)
+ self.app_insights_enabled = app_insights_enabled
+ self.endpoint_compute_type = None # type: Optional[str]
+ self.liveness_probe = liveness_probe
+ self.request_settings = request_settings
+ self.scale_settings = scale_settings
+
+
+class PartialAksOnlineDeployment(PartialOnlineDeployment):
+ """PartialAksOnlineDeployment.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
+ deployment.
+ :type app_insights_enabled: bool
+ :param endpoint_compute_type: Required. The compute type of the endpoint.Constant filled by
+ server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+ :type endpoint_compute_type: str or
+ ~azure_machine_learning_workspaces.models.EndpointComputeType
+ :param liveness_probe: Deployment container liveness/readiness probe configuration.
+ :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+ :param request_settings: Online deployment scoring requests configuration.
+ :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+ :param scale_settings: Online deployment scaling configuration.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+ :param container_resource_requirements: Resource requirements for each container instance
+ within an online deployment.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ """
+
+ _validation = {
+ 'endpoint_compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+ 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+ 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ }
+
+ def __init__(
+ self,
+ *,
+ app_insights_enabled: Optional[bool] = None,
+ liveness_probe: Optional["ProbeSettings"] = None,
+ request_settings: Optional["OnlineRequestSettings"] = None,
+ scale_settings: Optional["OnlineScaleSettings"] = None,
+ container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ **kwargs
+ ):
+ super(PartialAksOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, liveness_probe=liveness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs)
+ self.endpoint_compute_type = 'K8S' # type: str
+ self.container_resource_requirements = container_resource_requirements
+
+
+class PartialBatchDeployment(msrest.serialization.Model):
+ """Mutable batch inference settings per deployment.
+
+ :param description: Description of the endpoint deployment.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(PartialBatchDeployment, self).__init__(**kwargs)
+ self.description = description
+
+
+class PartialBatchDeploymentPartialTrackedResource(msrest.serialization.Model):
+ """Strictly used in update requests.
+
+ :param identity: Service identity associated with a resource.
+ :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+ :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type.
+ :type kind: str
+ :param location: The geo-location where the resource lives.
+ :type location: str
+ :param properties: Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.PartialBatchDeployment
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["ResourceIdentity"] = None,
+ kind: Optional[str] = None,
+ location: Optional[str] = None,
+ properties: Optional["PartialBatchDeployment"] = None,
+ tags: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(PartialBatchDeploymentPartialTrackedResource, self).__init__(**kwargs)
+ self.identity = identity
+ self.kind = kind
+ self.location = location
+ self.properties = properties
+ self.tags = tags
+
+
+class PartialBatchEndpoint(msrest.serialization.Model):
+ """Mutable Batch endpoint configuration.
+
+ :param traffic: Traffic rules on how the traffic will be routed across deployments.
+ :type traffic: dict[str, int]
+ """
+
+ _attribute_map = {
+ 'traffic': {'key': 'traffic', 'type': '{int}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ traffic: Optional[Dict[str, int]] = None,
+ **kwargs
+ ):
+ super(PartialBatchEndpoint, self).__init__(**kwargs)
+ self.traffic = traffic
+
+
+class PartialBatchEndpointPartialTrackedResource(msrest.serialization.Model):
+ """Strictly used in update requests.
+
+ :param identity: Service identity associated with a resource.
+ :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+ :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type.
+ :type kind: str
+ :param location: The geo-location where the resource lives.
+ :type location: str
+ :param properties: Additional attributes of the entity.
+ :type properties: ~azure_machine_learning_workspaces.models.PartialBatchEndpoint
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'PartialBatchEndpoint'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["ResourceIdentity"] = None,
+ kind: Optional[str] = None,
+ location: Optional[str] = None,
+ properties: Optional["PartialBatchEndpoint"] = None,
+ tags: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(PartialBatchEndpointPartialTrackedResource, self).__init__(**kwargs)
+ self.identity = identity
+ self.kind = kind
+ self.location = location
+ self.properties = properties
+ self.tags = tags
+
+
+class PartialManagedOnlineDeployment(PartialOnlineDeployment):
+    """PartialManagedOnlineDeployment.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param app_insights_enabled: Whether AppInsights telemetry is enabled for this online
+     deployment.
+    :type app_insights_enabled: bool
+    :param endpoint_compute_type: Required. The compute type of the endpoint. Constant filled by
+     server. Possible values include: "Managed", "K8S", "AzureMLCompute".
+    :type endpoint_compute_type: str or
+     ~azure_machine_learning_workspaces.models.EndpointComputeType
+    :param liveness_probe: Deployment container liveness/readiness probe configuration.
+    :type liveness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    :param request_settings: Online deployment scoring requests configuration.
+    :type request_settings: ~azure_machine_learning_workspaces.models.OnlineRequestSettings
+    :param scale_settings: Online deployment scaling configuration.
+    :type scale_settings: ~azure_machine_learning_workspaces.models.OnlineScaleSettings
+    :param readiness_probe: Deployment container liveness/readiness probe configuration.
+    :type readiness_probe: ~azure_machine_learning_workspaces.models.ProbeSettings
+    """
+
+    # 'endpoint_compute_type' is the polymorphic discriminator; msrest requires it on the wire.
+    _validation = {
+        'endpoint_compute_type': {'required': True},
+    }
+
+    # Python attribute -> (REST wire key, msrest serializer type).
+    _attribute_map = {
+        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
+        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
+        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
+        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
+        'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'},
+    }
+
+    def __init__(
+        self,
+        *,
+        app_insights_enabled: Optional[bool] = None,
+        liveness_probe: Optional["ProbeSettings"] = None,
+        request_settings: Optional["OnlineRequestSettings"] = None,
+        scale_settings: Optional["OnlineScaleSettings"] = None,
+        readiness_probe: Optional["ProbeSettings"] = None,
+        **kwargs
+    ):
+        super(PartialManagedOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, liveness_probe=liveness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs)
+        # Fixed discriminator value identifying this subtype of PartialOnlineDeployment.
+        self.endpoint_compute_type = 'Managed'  # type: str
+        self.readiness_probe = readiness_probe
+
+
+class PartialOnlineDeploymentPartialTrackedResource(msrest.serialization.Model):
+    """Strictly used in update requests.
+
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+     resources of the same type.
+    :type kind: str
+    :param location: The geo-location where the resource lives.
+    :type location: str
+    :param properties: Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineDeployment
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    """
+
+    # Python attribute -> (REST wire key, msrest serializer type). All fields optional:
+    # this PATCH-style envelope only carries what the caller wants to change.
+    _attribute_map = {
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["ResourceIdentity"] = None,
+        kind: Optional[str] = None,
+        location: Optional[str] = None,
+        properties: Optional["PartialOnlineDeployment"] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs)
+        self.identity = identity
+        self.kind = kind
+        self.location = location
+        self.properties = properties
+        self.tags = tags
+
+
+class PartialOnlineEndpoint(msrest.serialization.Model):
+    """Mutable online endpoint configuration.
+
+    :param traffic: Traffic rules on how the traffic will be routed across deployments.
+    :type traffic: dict[str, int]
+    """
+
+    # Serialized as a JSON object of string keys to ints.
+    # NOTE(review): keys appear to be deployment names and values traffic percentages —
+    # confirm against the online endpoint service documentation.
+    _attribute_map = {
+        'traffic': {'key': 'traffic', 'type': '{int}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        traffic: Optional[Dict[str, int]] = None,
+        **kwargs
+    ):
+        super(PartialOnlineEndpoint, self).__init__(**kwargs)
+        self.traffic = traffic
+
+
+class PartialOnlineEndpointPartialTrackedResource(msrest.serialization.Model):
+    """Strictly used in update requests.
+
+    :param identity: Service identity associated with a resource.
+    :type identity: ~azure_machine_learning_workspaces.models.ResourceIdentity
+    :param kind: Metadata used by portal/tooling/etc to render different UX experiences for
+     resources of the same type.
+    :type kind: str
+    :param location: The geo-location where the resource lives.
+    :type location: str
+    :param properties: Additional attributes of the entity.
+    :type properties: ~azure_machine_learning_workspaces.models.PartialOnlineEndpoint
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    """
+
+    # Python attribute -> (REST wire key, msrest serializer type). All fields optional:
+    # this PATCH-style envelope only carries what the caller wants to change.
+    _attribute_map = {
+        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
+        'kind': {'key': 'kind', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["ResourceIdentity"] = None,
+        kind: Optional[str] = None,
+        location: Optional[str] = None,
+        properties: Optional["PartialOnlineEndpoint"] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs)
+        self.identity = identity
+        self.kind = kind
+        self.location = location
+        self.properties = properties
+        self.tags = tags
+
+
+class Password(msrest.serialization.Model):
+    """Password.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: Name of the password (presumably a registry credential name, given its use in
+     RegistryListCredentialsResult — confirm with service docs).
+    :vartype name: str
+    :ivar value: The password value.
+    :vartype value: str
+    """
+
+    # Both fields are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'name': {'readonly': True},
+        'value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Password, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.name = None
+        self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+    """Settings for a personal compute instance.
+
+    :param assigned_user: A user explicitly assigned to a personal compute instance.
+    :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+    """
+
+    # Python attribute -> (REST wire key, msrest serializer type).
+    _attribute_map = {
+        'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+    }
+
+    def __init__(
+        self,
+        *,
+        assigned_user: Optional["AssignedUser"] = None,
+        **kwargs
+    ):
+        super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+        self.assigned_user = assigned_user
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+    """The Private Endpoint resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: The ARM identifier for Private Endpoint.
+    :vartype id: str
+    :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to.
+    :vartype subnet_arm_id: str
+    """
+
+    # Both fields are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'id': {'readonly': True},
+        'subnet_arm_id': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PrivateEndpoint, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.id = None
+        self.subnet_arm_id = None
+
+
+class PrivateEndpointConnection(Resource):
+    """The Private Endpoint Connection resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param private_endpoint: The resource of private end point.
+    :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+    :param private_link_service_connection_state: A collection of information about the state of
+     the connection between service consumer and provider.
+    :type private_link_service_connection_state:
+     ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+    :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+     Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+    :vartype provisioning_state: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+    """
+
+    # readonly: server-populated; dropped by msrest on serialization.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+    }
+
+    # Dotted keys (e.g. 'properties.privateEndpoint') flatten the nested 'properties'
+    # wire object into top-level Python attributes.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+        'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        private_endpoint: Optional["PrivateEndpoint"] = None,
+        private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
+        **kwargs
+    ):
+        super(PrivateEndpointConnection, self).__init__(**kwargs)
+        self.identity = identity
+        self.location = location
+        self.tags = tags
+        self.sku = sku
+        # Server-populated; always None on client-side construction.
+        self.system_data = None
+        self.private_endpoint = private_endpoint
+        self.private_link_service_connection_state = private_link_service_connection_state
+        self.provisioning_state = None
+
+
+class PrivateEndpointConnectionListResult(msrest.serialization.Model):
+    """List of private endpoint connection associated with the specified workspace.
+
+    :param value: Array of private endpoint connections.
+    :type value: list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+    """
+
+    # Simple list envelope; no nextLink field, so no server-side paging here.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["PrivateEndpointConnection"]] = None,
+        **kwargs
+    ):
+        super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
+        self.value = value
+
+
+class PrivateLinkResource(Resource):
+    """A private link resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+    :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :ivar group_id: The private link resource group id.
+    :vartype group_id: str
+    :ivar required_members: The private link resource required member names.
+    :vartype required_members: list[str]
+    :param required_zone_names: The private link resource Private link DNS zone name.
+    :type required_zone_names: list[str]
+    """
+
+    # readonly: server-populated; dropped by msrest on serialization.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'group_id': {'readonly': True},
+        'required_members': {'readonly': True},
+    }
+
+    # Dotted keys flatten the nested 'properties' wire object into top-level attributes.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'group_id': {'key': 'properties.groupId', 'type': 'str'},
+        'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+        'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        required_zone_names: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(PrivateLinkResource, self).__init__(**kwargs)
+        self.identity = identity
+        self.location = location
+        self.tags = tags
+        self.sku = sku
+        # Server-populated; always None on client-side construction.
+        self.system_data = None
+        self.group_id = None
+        self.required_members = None
+        self.required_zone_names = required_zone_names
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+    """A list of private link resources.
+
+    :param value: Array of private link resources.
+    :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+    """
+
+    # Simple list envelope; no nextLink field, so no server-side paging here.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["PrivateLinkResource"]] = None,
+        **kwargs
+    ):
+        super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+        self.value = value
+
+
<br>
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+    """A collection of information about the state of the connection between service consumer and provider.
+
+    :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+     of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+     "Timeout".
+    :type status: str or
+     ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+    :param description: The reason for approval/rejection of the connection.
+    :type description: str
+    :param actions_required: A message indicating if changes on the service provider require any
+     updates on the consumer.
+    :type actions_required: str
+    """
+
+    # 'status' is an extensible enum; serialized as a plain string on the wire.
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+        description: Optional[str] = None,
+        actions_required: Optional[str] = None,
+        **kwargs
+    ):
+        super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+        self.status = status
+        self.description = description
+        self.actions_required = actions_required
+
+
+class ProbeSettings(msrest.serialization.Model):
+    """Deployment container liveness/readiness probe configuration.
+
+    :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+    :type failure_threshold: int
+    :param initial_delay: The delay before the first probe in ISO 8601 format.
+    :type initial_delay: ~datetime.timedelta
+    :param period: The length of time between probes in ISO 8601 format.
+    :type period: ~datetime.timedelta
+    :param success_threshold: The number of successful probes before returning a healthy status.
+    :type success_threshold: int
+    :param timeout: The probe timeout in ISO 8601 format.
+    :type timeout: ~datetime.timedelta
+    """
+
+    # 'duration' serializer converts datetime.timedelta <-> ISO 8601 duration strings
+    # (e.g. "PT30S") on the wire.
+    _attribute_map = {
+        'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+        'initial_delay': {'key': 'initialDelay', 'type': 'duration'},
+        'period': {'key': 'period', 'type': 'duration'},
+        'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+        'timeout': {'key': 'timeout', 'type': 'duration'},
+    }
+
+    def __init__(
+        self,
+        *,
+        failure_threshold: Optional[int] = None,
+        initial_delay: Optional[datetime.timedelta] = None,
+        period: Optional[datetime.timedelta] = None,
+        success_threshold: Optional[int] = None,
+        timeout: Optional[datetime.timedelta] = None,
+        **kwargs
+    ):
+        super(ProbeSettings, self).__init__(**kwargs)
+        self.failure_threshold = failure_threshold
+        self.initial_delay = initial_delay
+        self.period = period
+        self.success_threshold = success_threshold
+        self.timeout = timeout
+
+
+class ProgressMetrics(msrest.serialization.Model):
+    """Progress metrics definition.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar completed_datapoint_count: The completed datapoint count.
+    :vartype completed_datapoint_count: long
+    :ivar incremental_dataset_last_refresh_time: The time of last successful incremental dataset
+     refresh in UTC.
+    :vartype incremental_dataset_last_refresh_time: ~datetime.datetime
+    :ivar skipped_datapoint_count: The skipped datapoint count.
+    :vartype skipped_datapoint_count: long
+    :ivar total_datapoint_count: The total datapoint count.
+    :vartype total_datapoint_count: long
+    """
+
+    # All fields are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'completed_datapoint_count': {'readonly': True},
+        'incremental_dataset_last_refresh_time': {'readonly': True},
+        'skipped_datapoint_count': {'readonly': True},
+        'total_datapoint_count': {'readonly': True},
+    }
+
+    # 'iso-8601' deserializes the timestamp string into datetime.datetime.
+    _attribute_map = {
+        'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'},
+        'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'},
+        'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'},
+        'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ProgressMetrics, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.completed_datapoint_count = None
+        self.incremental_dataset_last_refresh_time = None
+        self.skipped_datapoint_count = None
+        self.total_datapoint_count = None
+
+
+class PyTorch(DistributionConfiguration):
+    """PyTorch distribution configuration.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param distribution_type: Required. Specifies the type of distribution framework. Constant
+     filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+    :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+    :param process_count: Total process count for the distributed job.
+    :type process_count: int
+    """
+
+    # 'distribution_type' is the polymorphic discriminator; msrest requires it on the wire.
+    _validation = {
+        'distribution_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'distribution_type': {'key': 'distributionType', 'type': 'str'},
+        'process_count': {'key': 'processCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        process_count: Optional[int] = None,
+        **kwargs
+    ):
+        super(PyTorch, self).__init__(**kwargs)
+        # Fixed discriminator value identifying this subtype of DistributionConfiguration.
+        self.distribution_type = 'PyTorch'  # type: str
+        self.process_count = process_count
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+    """The properties for Quota update or retrieval.
+
+    :param id: Specifies the resource ID.
+    :type id: str
+    :param type: Specifies the resource type.
+    :type type: str
+    :param limit: The maximum permitted quota of the resource.
+    :type limit: long
+    :param unit: An enum describing the unit of quota measurement. Possible values include:
+     "Count".
+    :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    """
+
+    # Generated model: parameter names 'id' and 'type' shadow builtins by design
+    # to mirror the REST wire keys.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        id: Optional[str] = None,
+        type: Optional[str] = None,
+        limit: Optional[int] = None,
+        unit: Optional[Union[str, "QuotaUnit"]] = None,
+        **kwargs
+    ):
+        super(QuotaBaseProperties, self).__init__(**kwargs)
+        self.id = id
+        self.type = type
+        self.limit = limit
+        self.unit = unit
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+    """Quota update parameters.
+
+    :param value: The list for update quota.
+    :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+    :param location: Region of workspace quota to be updated.
+    :type location: str
+    """
+
+    # Python attribute -> (REST wire key, msrest serializer type).
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["QuotaBaseProperties"]] = None,
+        location: Optional[str] = None,
+        **kwargs
+    ):
+        super(QuotaUpdateParameters, self).__init__(**kwargs)
+        self.value = value
+        self.location = location
+
+
+class Recurrence(msrest.serialization.Model):
+    """The workflow trigger recurrence for ComputeStartStop schedule type.
+
+    :param frequency: The recurrence frequency. Possible values include: "NotSpecified", "Second",
+     "Minute", "Hour", "Day", "Week", "Month", "Year".
+    :type frequency: str or ~azure_machine_learning_workspaces.models.RecurrenceFrequency
+    :param interval: The interval.
+    :type interval: int
+    :param start_time: The start time.
+    :type start_time: str
+    :param time_zone: The time zone.
+    :type time_zone: str
+    :param schedule: The recurrence schedule.
+    :type schedule: ~azure_machine_learning_workspaces.models.RecurrenceSchedule
+    """
+
+    # 'frequency' is an extensible enum serialized as a plain string.
+    # NOTE(review): 'start_time' is a raw string (not iso-8601 datetime) per the swagger —
+    # callers must supply the service's expected time format.
+    _attribute_map = {
+        'frequency': {'key': 'frequency', 'type': 'str'},
+        'interval': {'key': 'interval', 'type': 'int'},
+        'start_time': {'key': 'startTime', 'type': 'str'},
+        'time_zone': {'key': 'timeZone', 'type': 'str'},
+        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
+    }
+
+    def __init__(
+        self,
+        *,
+        frequency: Optional[Union[str, "RecurrenceFrequency"]] = None,
+        interval: Optional[int] = None,
+        start_time: Optional[str] = None,
+        time_zone: Optional[str] = None,
+        schedule: Optional["RecurrenceSchedule"] = None,
+        **kwargs
+    ):
+        super(Recurrence, self).__init__(**kwargs)
+        self.frequency = frequency
+        self.interval = interval
+        self.start_time = start_time
+        self.time_zone = time_zone
+        self.schedule = schedule
+
+
+class RecurrenceSchedule(msrest.serialization.Model):
+    """The recurrence schedule.
+
+    :param minutes: The minutes.
+    :type minutes: list[int]
+    :param hours: The hours.
+    :type hours: list[int]
+    :param week_days: The days of the week.
+    :type week_days: list[str or ~azure_machine_learning_workspaces.models.DaysOfWeek]
+    """
+
+    # week_days accepts DaysOfWeek enum members but serializes as a plain '[str]' on the wire.
+    _attribute_map = {
+        'minutes': {'key': 'minutes', 'type': '[int]'},
+        'hours': {'key': 'hours', 'type': '[int]'},
+        'week_days': {'key': 'weekDays', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        minutes: Optional[List[int]] = None,
+        hours: Optional[List[int]] = None,
+        week_days: Optional[List[Union[str, "DaysOfWeek"]]] = None,
+        **kwargs
+    ):
+        super(RecurrenceSchedule, self).__init__(**kwargs)
+        self.minutes = minutes
+        self.hours = hours
+        self.week_days = week_days
+
+
+class RegenerateEndpointKeysRequest(msrest.serialization.Model):
+    """RegenerateEndpointKeysRequest.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param key_type: Required. Specification for which type of key to generate. Primary or
+     Secondary. Possible values include: "Primary", "Secondary".
+    :type key_type: str or ~azure_machine_learning_workspaces.models.KeyType
+    :param key_value: The value the key is set to.
+    :type key_value: str
+    """
+
+    # key_type is mandatory and therefore a positional-style keyword arg with no default below.
+    _validation = {
+        'key_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'key_type': {'key': 'keyType', 'type': 'str'},
+        'key_value': {'key': 'keyValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key_type: Union[str, "KeyType"],
+        key_value: Optional[str] = None,
+        **kwargs
+    ):
+        super(RegenerateEndpointKeysRequest, self).__init__(**kwargs)
+        self.key_type = key_type
+        self.key_value = key_value
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+    """RegistryListCredentialsResult.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location: Location of the registry (server-populated; presumably the container
+     registry's region — confirm with service docs).
+    :vartype location: str
+    :ivar username: Login username for the registry (server-populated).
+    :vartype username: str
+    :param passwords: Login passwords associated with the username.
+    :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+    """
+
+    # location/username are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'location': {'readonly': True},
+        'username': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'username': {'key': 'username', 'type': 'str'},
+        'passwords': {'key': 'passwords', 'type': '[Password]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        passwords: Optional[List["Password"]] = None,
+        **kwargs
+    ):
+        super(RegistryListCredentialsResult, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.location = None
+        self.username = None
+        self.passwords = passwords
+
+
+class ResourceId(msrest.serialization.Model):
+    """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Required. The ID of the resource.
+    :type id: str
+    """
+
+    # 'id' is mandatory; msrest raises a ValidationError if it is missing on serialization.
+    _validation = {
+        'id': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        id: str,
+        **kwargs
+    ):
+        super(ResourceId, self).__init__(**kwargs)
+        self.id = id
+
+
+class ResourceIdentity(msrest.serialization.Model):
+    """Service identity associated with a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar principal_id: Client ID that is used when authenticating.
+    :vartype principal_id: str
+    :ivar tenant_id: AAD Tenant where this identity lives.
+    :vartype tenant_id: str
+    :param type: Defines values for a ResourceIdentity's type. Possible values include:
+     "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned", "None".
+    :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityAssignment
+    :param user_assigned_identities: Dictionary of the user assigned identities, key is ARM
+     resource ID of the UAI.
+    :type user_assigned_identities: dict[str,
+     ~azure_machine_learning_workspaces.models.UserAssignedIdentityMeta]
+    """
+
+    # principal_id/tenant_id are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'principal_id': {'readonly': True},
+        'tenant_id': {'readonly': True},
+    }
+
+    # '{UserAssignedIdentityMeta}' serializes a dict of str -> UserAssignedIdentityMeta objects.
+    _attribute_map = {
+        'principal_id': {'key': 'principalId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[Union[str, "ResourceIdentityAssignment"]] = None,
+        user_assigned_identities: Optional[Dict[str, "UserAssignedIdentityMeta"]] = None,
+        **kwargs
+    ):
+        super(ResourceIdentity, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.principal_id = None
+        self.tenant_id = None
+        self.type = type
+        self.user_assigned_identities = user_assigned_identities
+
+
+class ResourceName(msrest.serialization.Model):
+    """The Resource Name.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    # Both fields are server-populated; msrest drops readonly fields on serialization.
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceName, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.value = None
+        self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+    """The quota assigned to a resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar aml_workspace_location: Region of the AML workspace in the id.
+    :vartype aml_workspace_location: str
+    :ivar type: Specifies the resource type.
+    :vartype type: str
+    :ivar name: Name of the resource.
+    :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+    :ivar limit: The maximum permitted quota of the resource.
+    :vartype limit: long
+    :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+    :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+    """
+
+    # All fields are server-populated; this model is read-only from the client's perspective.
+    _validation = {
+        'id': {'readonly': True},
+        'aml_workspace_location': {'readonly': True},
+        'type': {'readonly': True},
+        'name': {'readonly': True},
+        'limit': {'readonly': True},
+        'unit': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'ResourceName'},
+        'limit': {'key': 'limit', 'type': 'long'},
+        'unit': {'key': 'unit', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceQuota, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.id = None
+        self.aml_workspace_location = None
+        self.type = None
+        self.name = None
+        self.limit = None
+        self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+    """ResourceSkuLocationInfo.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar location: Location of the SKU.
+    :vartype location: str
+    :ivar zones: List of availability zones where the SKU is supported.
+    :vartype zones: list[str]
+    :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+    :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+    """
+
+    # All fields are server-populated; this model is read-only from the client's perspective.
+    _validation = {
+        'location': {'readonly': True},
+        'zones': {'readonly': True},
+        'zone_details': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'location': {'key': 'location', 'type': 'str'},
+        'zones': {'key': 'zones', 'type': '[str]'},
+        'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.location = None
+        self.zones = None
+        self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+    """Describes The zonal capabilities of a SKU.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+    :vartype name: list[str]
+    :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+     of zones.
+    :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+    """
+
+    # Both fields are server-populated; this model is read-only from the client's perspective.
+    _validation = {
+        'name': {'readonly': True},
+        'capabilities': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': '[str]'},
+        'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.name = None
+        self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+    """The restriction because of which SKU cannot be used.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location. This would
+     be different locations where the SKU is restricted.
+    :vartype values: list[str]
+    :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+     "NotAvailableForRegion", "NotAvailableForSubscription".
+    :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+    """
+
+    # type/values are server-populated; only reason_code is settable by the client.
+    _validation = {
+        'type': {'readonly': True},
+        'values': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'values': {'key': 'values', 'type': '[str]'},
+        'reason_code': {'key': 'reasonCode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        reason_code: Optional[Union[str, "ReasonCode"]] = None,
+        **kwargs
+    ):
+        super(Restriction, self).__init__(**kwargs)
+        # Server-populated; always None on client-side construction.
+        self.type = None
+        self.values = None
+        self.reason_code = reason_code
+
+
+class Route(msrest.serialization.Model):
+ """Route.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param path: Required. The path for the route.
+ :type path: str
+ :param port: Required. The port for the route.
+ :type port: int
+ """
+
+ _validation = {
+ 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ 'port': {'required': True},
+ }
+
+ _attribute_map = {
+ 'path': {'key': 'path', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ path: str,
+ port: int,
+ **kwargs
+ ):
+ super(Route, self).__init__(**kwargs)
+ self.path = path
+ self.port = port
+
+
+class SasDatastoreCredentials(DatastoreCredentials):
+ """SAS datastore credentials configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ :param secrets: Storage container secrets.
+ :type secrets: ~azure_machine_learning_workspaces.models.SasDatastoreSecrets
+ """
+
+ _validation = {
+ 'credentials_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'},
+ }
+
+ def __init__(
+ self,
+ *,
+ secrets: Optional["SasDatastoreSecrets"] = None,
+ **kwargs
+ ):
+ super(SasDatastoreCredentials, self).__init__(**kwargs)
+ self.credentials_type = 'Sas' # type: str
+ self.secrets = secrets
+
+
+class SasDatastoreSecrets(DatastoreSecrets):
+ """Datastore SAS secrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ :param sas_token: Storage container SAS token.
+ :type sas_token: str
+ """
+
+ _validation = {
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ 'sas_token': {'key': 'sasToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ sas_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(SasDatastoreSecrets, self).__init__(**kwargs)
+ self.secrets_type = 'Sas' # type: str
+ self.sas_token = sas_token
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+ :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This
+ string needs to be in the RFC Format.
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_node_count: int,
+ min_node_count: Optional[int] = 0,
+ node_idle_time_before_scale_down: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = max_node_count
+ self.min_node_count = min_node_count
+ self.node_idle_time_before_scale_down = node_idle_time_before_scale_down
+
+
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ script_source: Optional[str] = None,
+ script_data: Optional[str] = None,
+ script_arguments: Optional[str] = None,
+ timeout: Optional[str] = None,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = script_source
+ self.script_data = script_data
+ self.script_arguments = script_arguments
+ self.timeout = timeout
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ :param creation_script: Script that's run only once during provision of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ startup_script: Optional["ScriptReference"] = None,
+ creation_script: Optional["ScriptReference"] = None,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = startup_script
+ self.creation_script = creation_script
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+ """ServiceManagedResourcesSettings.
+
+ :param cosmos_db: The settings for the service managed cosmosdb account.
+ :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+ """
+
+ _attribute_map = {
+ 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cosmos_db: Optional["CosmosDbSettings"] = None,
+ **kwargs
+ ):
+ super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+ self.cosmos_db = cosmos_db
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: str,
+ client_secret: str,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+
+class ServicePrincipalDatastoreCredentials(DatastoreCredentials):
+ """Service Principal datastore credentials configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ :param authority_url: Authority URL used for authentication.
+ :type authority_url: str
+ :param client_id: Required. Service principal client ID.
+ :type client_id: str
+ :param resource_uri: Resource the service principal has access to.
+ :type resource_uri: str
+ :param secrets: Service principal secrets.
+ :type secrets: ~azure_machine_learning_workspaces.models.ServicePrincipalDatastoreSecrets
+ :param tenant_id: Required. ID of the tenant to which the service principal belongs.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'credentials_type': {'required': True},
+ 'client_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ 'authority_url': {'key': 'authorityUrl', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'resource_uri': {'key': 'resourceUri', 'type': 'str'},
+ 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: str,
+ tenant_id: str,
+ authority_url: Optional[str] = None,
+ resource_uri: Optional[str] = None,
+ secrets: Optional["ServicePrincipalDatastoreSecrets"] = None,
+ **kwargs
+ ):
+ super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs)
+ self.credentials_type = 'ServicePrincipal' # type: str
+ self.authority_url = authority_url
+ self.client_id = client_id
+ self.resource_uri = resource_uri
+ self.secrets = secrets
+ self.tenant_id = tenant_id
+
+
+class ServicePrincipalDatastoreSecrets(DatastoreSecrets):
+ """Datastore Service Principal secrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ :param client_secret: Service principal secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_secret: Optional[str] = None,
+ **kwargs
+ ):
+ super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs)
+ self.secrets_type = 'ServicePrincipal' # type: str
+ self.client_secret = client_secret
+
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scripts: Optional["ScriptsToExecute"] = None,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = scripts
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ private_link_resource_id: Optional[str] = None,
+ group_id: Optional[str] = None,
+ request_message: Optional[str] = None,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = name
+ self.private_link_resource_id = private_link_resource_id
+ self.group_id = group_id
+ self.request_message = request_message
+ self.status = status
+
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ tier: Optional[str] = None,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = name
+ self.tier = tier
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceSku"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class SqlAdminDatastoreCredentials(DatastoreCredentials):
+ """SQL Admin datastore credentials configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param credentials_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type credentials_type: str or ~azure_machine_learning_workspaces.models.CredentialsType
+ :param secrets: SQL database secrets.
+ :type secrets: ~azure_machine_learning_workspaces.models.SqlAdminDatastoreSecrets
+ :param user_id: Required. SQL database user name.
+ :type user_id: str
+ """
+
+ _validation = {
+ 'credentials_type': {'required': True},
+ 'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'credentials_type': {'key': 'credentialsType', 'type': 'str'},
+ 'secrets': {'key': 'secrets', 'type': 'SqlAdminDatastoreSecrets'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_id: str,
+ secrets: Optional["SqlAdminDatastoreSecrets"] = None,
+ **kwargs
+ ):
+ super(SqlAdminDatastoreCredentials, self).__init__(**kwargs)
+ self.credentials_type = 'SqlAdmin' # type: str
+ self.secrets = secrets
+ self.user_id = user_id
+
+
+class SqlAdminDatastoreSecrets(DatastoreSecrets):
+ """Datastore SQL Admin secrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param secrets_type: Required. Credential type used to authentication with storage.Constant
+ filled by server. Possible values include: "AccountKey", "Certificate", "None", "Sas",
+ "ServicePrincipal", "SqlAdmin".
+ :type secrets_type: str or ~azure_machine_learning_workspaces.models.SecretsType
+ :param password: SQL database password.
+ :type password: str
+ """
+
+ _validation = {
+ 'secrets_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'secrets_type': {'key': 'secretsType', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(SqlAdminDatastoreSecrets, self).__init__(**kwargs)
+ self.secrets_type = 'SqlAdmin' # type: str
+ self.password = password
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ :param leaf_domain_label: Leaf domain label of public endpoint.
+ :type leaf_domain_label: str
+ :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
+ :type overwrite_existing_domain: bool
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
+ 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "SslConfigurationStatus"]] = None,
+ cert: Optional[str] = None,
+ key: Optional[str] = None,
+ cname: Optional[str] = None,
+ leaf_domain_label: Optional[str] = None,
+ overwrite_existing_domain: Optional[bool] = None,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = status
+ self.cert = cert
+ self.key = key
+ self.cname = cname
+ self.leaf_domain_label = leaf_domain_label
+ self.overwrite_existing_domain = overwrite_existing_domain
+
+
+class StatusMessage(msrest.serialization.Model):
+ """Active message associated with project.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Service-defined message code.
+ :vartype code: str
+ :ivar created_time_utc: Time in UTC at which the message was created.
+ :vartype created_time_utc: ~datetime.datetime
+ :ivar level: Severity level of message. Possible values include: "Error", "Information",
+ "Warning".
+ :vartype level: str or ~azure_machine_learning_workspaces.models.StatusMessageLevel
+ :ivar message: A human-readable representation of the message code.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'created_time_utc': {'readonly': True},
+ 'level': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
+ 'level': {'key': 'level', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(StatusMessage, self).__init__(**kwargs)
+ self.code = None
+ self.created_time_utc = None
+ self.level = None
+ self.message = None
+
+
+class SweepJob(JobBase):
+ """Sweep job definition.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The asset description text.
+ :type description: str
+ :ivar interaction_endpoints: List of JobEndpoints.
+ For local jobs, a job endpoint will have an endpoint value of FileStreamObject.
+ :vartype interaction_endpoints: dict[str,
+ ~azure_machine_learning_workspaces.models.JobEndpoint]
+ :param job_type: Required. Specifies the type of job.Constant filled by server. Possible
+ values include: "Command", "Sweep", "Labeling".
+ :type job_type: str or ~azure_machine_learning_workspaces.models.JobType
+ :param properties: The asset property dictionary.
+ :type properties: dict[str, str]
+ :ivar provisioning_state: Specifies the job provisioning state. Possible values include:
+ "Succeeded", "Failed", "Canceled", "InProgress".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.JobProvisioningState
+ :param tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :type tags: dict[str, str]
+ :param algorithm: Required. Type of the hyperparameter sampling algorithms. Possible values
+ include: "Grid", "Random", "Bayesian".
+ :type algorithm: str or ~azure_machine_learning_workspaces.models.SamplingAlgorithm
+ :param compute: Required. Compute binding for the job.
+ :type compute: ~azure_machine_learning_workspaces.models.ComputeConfiguration
+ :param early_termination: Early termination policies enable canceling poor-performing runs
+ before they complete.
+ :type early_termination: ~azure_machine_learning_workspaces.models.EarlyTerminationPolicy
+ :param experiment_name: The name of the experiment the job belongs to. If not set, the job is
+ placed in the "Default" experiment.
+ :type experiment_name: str
+ :param identity: Identity configuration. If set, this should be one of AmlToken,
+ ManagedIdentity or null.
+ Defaults to AmlToken if null.
+ :type identity: ~azure_machine_learning_workspaces.models.IdentityConfiguration
+ :param max_concurrent_trials: An upper bound on the number of trials performed in parallel.
+ :type max_concurrent_trials: int
+ :param max_total_trials: An upper bound on the number of trials to perform.
+ :type max_total_trials: int
+ :param objective: Required. Optimization objective.
+ :type objective: ~azure_machine_learning_workspaces.models.Objective
+ :ivar output: Location of the job output logs and artifacts.
+ :vartype output: ~azure_machine_learning_workspaces.models.JobOutput
+ :param priority: Job priority for scheduling policy. Only applies to AMLCompute.
+ Private preview feature and only available to users on the allow list.
+ :type priority: int
+ :param search_space: Required. A dictionary containing each parameter and its distribution. The
+ dictionary key is the name of the parameter.
+ :type search_space: dict[str, object]
+ :ivar status: The status of a job. Possible values include: "NotStarted", "Starting",
+ "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
+ "Failed", "Canceled", "NotResponding", "Paused", "Unknown".
+ :vartype status: str or ~azure_machine_learning_workspaces.models.JobStatus
+ :param timeout: The total timeout in ISO 8601 format. Only supports duration with precision as
+ low as Minutes.
+ :type timeout: ~datetime.timedelta
+ :param trial: Trial component definition.
+ :type trial: ~azure_machine_learning_workspaces.models.TrialComponent
+ """
+
+ _validation = {
+ 'interaction_endpoints': {'readonly': True},
+ 'job_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'algorithm': {'required': True},
+ 'compute': {'required': True},
+ 'objective': {'required': True},
+ 'output': {'readonly': True},
+ 'search_space': {'required': True},
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
+ 'job_type': {'key': 'jobType', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'algorithm': {'key': 'algorithm', 'type': 'str'},
+ 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
+ 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
+ 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
+ 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
+ 'objective': {'key': 'objective', 'type': 'Objective'},
+ 'output': {'key': 'output', 'type': 'JobOutput'},
+ 'priority': {'key': 'priority', 'type': 'int'},
+ 'search_space': {'key': 'searchSpace', 'type': '{object}'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'duration'},
+ 'trial': {'key': 'trial', 'type': 'TrialComponent'},
+ }
+
+ def __init__(
+ self,
+ *,
+ algorithm: Union[str, "SamplingAlgorithm"],
+ compute: "ComputeConfiguration",
+ objective: "Objective",
+ search_space: Dict[str, object],
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ tags: Optional[Dict[str, str]] = None,
+ early_termination: Optional["EarlyTerminationPolicy"] = None,
+ experiment_name: Optional[str] = None,
+ identity: Optional["IdentityConfiguration"] = None,
+ max_concurrent_trials: Optional[int] = None,
+ max_total_trials: Optional[int] = None,
+ priority: Optional[int] = None,
+ timeout: Optional[datetime.timedelta] = None,
+ trial: Optional["TrialComponent"] = None,
+ **kwargs
+ ):
+ super(SweepJob, self).__init__(description=description, properties=properties, tags=tags, **kwargs)
+ self.job_type = 'Sweep' # type: str
+ self.algorithm = algorithm
+ self.compute = compute
+ self.early_termination = early_termination
+ self.experiment_name = experiment_name
+ self.identity = identity
+ self.max_concurrent_trials = max_concurrent_trials
+ self.max_total_trials = max_total_trials
+ self.objective = objective
+ self.output = None
+ self.priority = priority
+ self.search_space = search_space
+ self.status = None
+ self.timeout = timeout
+ self.trial = trial
+
+
+class SynapseSparkPoolProperties(msrest.serialization.Model):
+ """Properties specific to Synapse Spark pools.
+
+ :param properties: AKS properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["SynapseSparkPoolPropertiesautogenerated"] = None,
+ **kwargs
+ ):
+ super(SynapseSparkPoolProperties, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class SynapseSpark(Compute, SynapseSparkPoolProperties):
+ """A SynapseSpark compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param properties: AKS properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["SynapseSparkPoolPropertiesautogenerated"] = None,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(SynapseSpark, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs)
+ self.properties = properties
+ self.compute_type = 'SynapseSpark' # type: str
+ self.compute_type = 'SynapseSpark' # type: str
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = disable_local_auth
+
+
+class SynapseSparkPoolPropertiesautogenerated(msrest.serialization.Model):
+    """Synapse Spark pool properties.
+
+    NOTE(review): the original summary said "AKS properties", which does not match
+    this model — every attribute below describes a Synapse Spark pool.
+
+    :param auto_scale_properties: Auto scale properties.
+    :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
+    :param auto_pause_properties: Auto pause properties.
+    :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
+    :param spark_version: Spark version.
+    :type spark_version: str
+    :param node_count: The number of compute nodes currently assigned to the compute.
+    :type node_count: int
+    :param node_size: Node size.
+    :type node_size: str
+    :param node_size_family: Node size family.
+    :type node_size_family: str
+    :param subscription_id: Azure subscription identifier.
+    :type subscription_id: str
+    :param resource_group: Name of the resource group in which workspace is located.
+    :type resource_group: str
+    :param workspace_name: Name of Azure Machine Learning workspace.
+    :type workspace_name: str
+    :param pool_name: Pool name.
+    :type pool_name: str
+    """
+
+    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
+    _attribute_map = {
+        'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
+        'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
+        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
+        'node_count': {'key': 'nodeCount', 'type': 'int'},
+        'node_size': {'key': 'nodeSize', 'type': 'str'},
+        'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
+        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
+        'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+        'pool_name': {'key': 'poolName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        auto_scale_properties: Optional["AutoScaleProperties"] = None,
+        auto_pause_properties: Optional["AutoPauseProperties"] = None,
+        spark_version: Optional[str] = None,
+        node_count: Optional[int] = None,
+        node_size: Optional[str] = None,
+        node_size_family: Optional[str] = None,
+        subscription_id: Optional[str] = None,
+        resource_group: Optional[str] = None,
+        workspace_name: Optional[str] = None,
+        pool_name: Optional[str] = None,
+        **kwargs
+    ):
+        super(SynapseSparkPoolPropertiesautogenerated, self).__init__(**kwargs)
+        self.auto_scale_properties = auto_scale_properties
+        self.auto_pause_properties = auto_pause_properties
+        self.spark_version = spark_version
+        self.node_count = node_count
+        self.node_size = node_size
+        self.node_size_family = node_size_family
+        self.subscription_id = subscription_id
+        self.resource_group = resource_group
+        self.workspace_name = workspace_name
+        self.pool_name = pool_name
+
+
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ *,
+ created_by: Optional[str] = None,
+ created_by_type: Optional[Union[str, "CreatedByType"]] = None,
+ created_at: Optional[datetime.datetime] = None,
+ last_modified_by: Optional[str] = None,
+ last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None,
+ last_modified_at: Optional[datetime.datetime] = None,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = created_by
+ self.created_by_type = created_by_type
+ self.created_at = created_at
+ self.last_modified_by = last_modified_by
+ self.last_modified_by_type = last_modified_by_type
+ self.last_modified_at = last_modified_at
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class TensorFlow(DistributionConfiguration):
+ """TensorFlow distribution configuration.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param distribution_type: Required. Specifies the type of distribution framework.Constant
+ filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi".
+ :type distribution_type: str or ~azure_machine_learning_workspaces.models.DistributionType
+ :param parameter_server_count: Number of parameter server tasks.
+ :type parameter_server_count: int
+ :param worker_count: Number of workers. Overwrites the node count in compute binding.
+ :type worker_count: int
+ """
+
+ _validation = {
+ 'distribution_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'distribution_type': {'key': 'distributionType', 'type': 'str'},
+ 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
+ 'worker_count': {'key': 'workerCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ parameter_server_count: Optional[int] = None,
+ worker_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(TensorFlow, self).__init__(**kwargs)
+ self.distribution_type = 'TensorFlow' # type: str
+ self.parameter_server_count = parameter_server_count
+ self.worker_count = worker_count
+
+
+class TrialComponent(msrest.serialization.Model):
+ """Trial component definition.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code_id: ARM resource ID of the code asset.
+ :type code_id: str
+ :param command: Required. The command to execute on startup of the job. eg. "python train.py".
+ :type command: str
+ :param distribution: Distribution configuration of the job. If set, this should be one of Mpi,
+ Tensorflow, PyTorch, or null.
+ :type distribution: ~azure_machine_learning_workspaces.models.DistributionConfiguration
+ :param environment_id: The ARM resource ID of the Environment specification for the job.
+ :type environment_id: str
+ :param environment_variables: Environment variables included in the job.
+ :type environment_variables: dict[str, str]
+ :param input_data_bindings: Mapping of input data bindings used in the job.
+ :type input_data_bindings: dict[str,
+ ~azure_machine_learning_workspaces.models.InputDataBinding]
+ :param output_data_bindings: Mapping of output data bindings used in the job.
+ :type output_data_bindings: dict[str,
+ ~azure_machine_learning_workspaces.models.OutputDataBinding]
+ :param timeout: The max run duration in ISO 8601 format, after which the trial component will
+ be cancelled.
+ Only supports duration with precision as low as Seconds.
+ :type timeout: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+ }
+
+ _attribute_map = {
+ 'code_id': {'key': 'codeId', 'type': 'str'},
+ 'command': {'key': 'command', 'type': 'str'},
+ 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
+ 'environment_id': {'key': 'environmentId', 'type': 'str'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
+ 'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
+ 'timeout': {'key': 'timeout', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ command: str,
+ code_id: Optional[str] = None,
+ distribution: Optional["DistributionConfiguration"] = None,
+ environment_id: Optional[str] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
+ output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
+ timeout: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(TrialComponent, self).__init__(**kwargs)
+ self.code_id = code_id
+ self.command = command
+ self.distribution = distribution
+ self.environment_id = environment_id
+ self.environment_variables = environment_variables
+ self.input_data_bindings = input_data_bindings
+ self.output_data_bindings = output_data_bindings
+ self.timeout = timeout
+
+
+class TruncationSelectionPolicy(EarlyTerminationPolicy):
+ """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param delay_evaluation: Number of intervals by which to delay the first evaluation.
+ :type delay_evaluation: int
+ :param evaluation_interval: Interval (number of runs) between policy evaluations.
+ :type evaluation_interval: int
+ :param policy_type: Required. Name of policy configuration.Constant filled by server. Possible
+ values include: "Bandit", "MedianStopping", "TruncationSelection".
+ :type policy_type: str or ~azure_machine_learning_workspaces.models.EarlyTerminationPolicyType
+ :param truncation_percentage: The percentage of runs to cancel at each evaluation interval.
+ :type truncation_percentage: int
+ """
+
+ _validation = {
+ 'policy_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
+ 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
+ 'policy_type': {'key': 'policyType', 'type': 'str'},
+ 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_evaluation: Optional[int] = None,
+ evaluation_interval: Optional[int] = None,
+ truncation_percentage: Optional[int] = None,
+ **kwargs
+ ):
+ super(TruncationSelectionPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs)
+ self.policy_type = 'TruncationSelection' # type: str
+ self.truncation_percentage = truncation_percentage
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ limit: Optional[int] = None,
+ status: Optional[Union[str, "Status"]] = None,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = limit
+ self.unit = None
+ self.status = status
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ admin_user_name: str,
+ admin_user_ssh_public_key: Optional[str] = None,
+ admin_user_password: Optional[str] = None,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = admin_user_name
+ self.admin_user_ssh_public_key = admin_user_ssh_public_key
+ self.admin_user_password = admin_user_password
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId(aka appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class UserAssignedIdentityMeta(msrest.serialization.Model):
+ """User assigned identities associated with a resource.
+
+ :param client_id: Aka application ID, a unique identifier generated by Azure AD that is tied to
+ an application and service principal during its initial provisioning.
+ :type client_id: str
+ :param principal_id: The object ID of the service principal object for your managed identity
+ that is used to grant role-based access to an Azure resource.
+ :type principal_id: str
+ """
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: Optional[str] = None,
+ principal_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(UserAssignedIdentityMeta, self).__init__(**kwargs)
+ self.client_id = client_id
+ self.principal_id = principal_id
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["VirtualMachineProperties"] = None,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = properties
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = id
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ virtual_machine_size: Optional[str] = None,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ is_notebook_instance_compute: Optional[bool] = None,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = virtual_machine_size
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+ self.is_notebook_instance_compute = is_notebook_instance_compute
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on Azure Virtual Machines.
+
+    NOTE(review): the original summary said "based on AKS"; the discriminator below
+    is 'VirtualMachine' and the payload is VM SSH credentials.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics", "SynapseSpark".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param administrator_account: Admin credentials for virtual machine.
+    :type administrator_account:
+     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+    }
+
+    def __init__(
+        self,
+        *,
+        administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+        **kwargs
+    ):
+        super(VirtualMachineSecrets, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this subclass.
+        self.compute_type = 'VirtualMachine'  # type: str
+        self.administrator_account = administrator_account
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of gPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ }
+
+ def __init__(
+ self,
+ *,
+ estimated_vm_prices: Optional["EstimatedVmPrices"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = estimated_vm_prices
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param value: The list of virtual machine sizes supported by AmlCompute.
+ :type value: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["VirtualMachineSize"]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ public_key_data: Optional[str] = None,
+ private_key_data: Optional[str] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = username
+ self.password = password
+ self.public_key_data = public_key_data
+ self.private_key_data = private_key_data
+
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name in mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ workspace RP in customer subscription if the workspace is CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ """
+
+    # Attributes the service treats as read-only: msrest will not serialize
+    # them on outgoing requests and only populates them from responses.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'workspace_id': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+        'service_provisioned_resource_group': {'readonly': True},
+        'private_link_count': {'readonly': True},
+        'private_endpoint_connections': {'readonly': True},
+        'notebook_info': {'readonly': True},
+        'tenant_id': {'readonly': True},
+    }
+
+    # Maps each Python attribute to its REST wire key and msrest type string.
+    # Keys of the form 'properties.<name>' are flattened by msrest into the
+    # nested "properties" object of the ARM resource payload; '[X]' denotes a
+    # list of model X and '{str}' a dict of str values.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'location': {'key': 'location', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+        'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+        'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+        'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+        'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+        'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+        'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+        'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+        'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+        'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+        'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+        'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+        'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+        'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+        'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+        'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+        'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identity: Optional["Identity"] = None,
+        location: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        description: Optional[str] = None,
+        friendly_name: Optional[str] = None,
+        key_vault: Optional[str] = None,
+        application_insights: Optional[str] = None,
+        container_registry: Optional[str] = None,
+        storage_account: Optional[str] = None,
+        discovery_url: Optional[str] = None,
+        encryption: Optional["EncryptionProperty"] = None,
+        hbi_workspace: Optional[bool] = False,
+        image_build_compute: Optional[str] = None,
+        allow_public_access_when_behind_vnet: Optional[bool] = False,
+        shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None,
+        service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+        primary_user_assigned_identity: Optional[str] = None,
+        **kwargs
+    ):
+        """Initialize a Workspace model.
+
+        Only client-settable properties are keyword arguments here; the
+        read-only, server-populated attributes listed in ``_validation`` are
+        initialized to None and filled in when deserializing a response.
+        """
+        super(Workspace, self).__init__(**kwargs)
+        self.identity = identity
+        self.location = location
+        self.tags = tags
+        self.sku = sku
+        self.system_data = None  # readonly; populated by the service
+        self.workspace_id = None  # readonly; populated by the service
+        self.description = description
+        self.friendly_name = friendly_name
+        self.key_vault = key_vault
+        self.application_insights = application_insights
+        self.container_registry = container_registry
+        self.storage_account = storage_account
+        self.discovery_url = discovery_url
+        self.provisioning_state = None  # readonly; populated by the service
+        self.encryption = encryption
+        self.hbi_workspace = hbi_workspace
+        self.service_provisioned_resource_group = None  # readonly; populated by the service
+        self.private_link_count = None  # readonly; populated by the service
+        self.image_build_compute = image_build_compute
+        self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet
+        self.private_endpoint_connections = None  # readonly; populated by the service
+        self.shared_private_link_resources = shared_private_link_resources
+        self.notebook_info = None  # readonly; populated by the service
+        self.service_managed_resources_settings = service_managed_resources_settings
+        self.primary_user_assigned_identity = primary_user_assigned_identity
+        self.tenant_id = None  # readonly; populated by the service
+
+
+class WorkspaceConnection(msrest.serialization.Model):
+    """Workspace connection.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: ResourceId of the workspace connection.
+    :vartype id: str
+    :ivar name: Friendly name of the workspace connection.
+    :vartype name: str
+    :ivar type: Resource type of workspace connection.
+    :vartype type: str
+    :param category: Category of the workspace connection.
+    :type category: str
+    :param target: Target of the workspace connection.
+    :type target: str
+    :param auth_type: Authorization type of the workspace connection.
+    :type auth_type: str
+    :param value: Value details of the workspace connection.
+    :type value: str
+    :param value_format: Format for the workspace connection value. Possible values include:
+     "JSON".
+    :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+    """
+
+    # id/name/type are server-assigned; msrest will not serialize them.
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    # 'properties.*' keys are flattened into the nested "properties" object.
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'category': {'key': 'properties.category', 'type': 'str'},
+        'target': {'key': 'properties.target', 'type': 'str'},
+        'auth_type': {'key': 'properties.authType', 'type': 'str'},
+        'value': {'key': 'properties.value', 'type': 'str'},
+        'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        category: Optional[str] = None,
+        target: Optional[str] = None,
+        auth_type: Optional[str] = None,
+        value: Optional[str] = None,
+        value_format: Optional[Union[str, "ValueFormat"]] = None,
+        **kwargs
+    ):
+        super(WorkspaceConnection, self).__init__(**kwargs)
+        # Read-only attributes start as None and are filled on deserialization.
+        self.id = None
+        self.name = None
+        self.type = None
+        self.category = category
+        self.target = target
+        self.auth_type = auth_type
+        self.value = value
+        self.value_format = value_format
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+    """The result of a request to list machine learning workspaces.
+
+    :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+     nextLink field should be used to request the next list of machine learning workspaces.
+    :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+    :param next_link: The URI that can be used to request the next list of machine learning
+     workspaces.
+    :type next_link: str
+    """
+
+    # '[Workspace]' is msrest's notation for a list of Workspace models.
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Workspace]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["Workspace"]] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(WorkspaceListResult, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class WorkspaceSku(msrest.serialization.Model):
+    """Describes Workspace Sku details and features.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar locations: The set of locations that the SKU is available. This will be supported and
+     registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
+    :vartype locations: list[str]
+    :ivar location_info: A list of locations and availability zones in those locations where the
+     SKU is available.
+    :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+    :ivar tier: Sku Tier like Basic or Enterprise.
+    :vartype tier: str
+    :ivar resource_type:
+    :vartype resource_type: str
+    :ivar name:
+    :vartype name: str
+    :ivar capabilities: List of features/user capabilities associated with the sku.
+    :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+    :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
+     there are no restrictions.
+    :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+    """
+
+    # Everything except 'restrictions' is server-populated and read-only.
+    _validation = {
+        'locations': {'readonly': True},
+        'location_info': {'readonly': True},
+        'tier': {'readonly': True},
+        'resource_type': {'readonly': True},
+        'name': {'readonly': True},
+        'capabilities': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'locations': {'key': 'locations', 'type': '[str]'},
+        'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+        'tier': {'key': 'tier', 'type': 'str'},
+        'resource_type': {'key': 'resourceType', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+        'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        restrictions: Optional[List["Restriction"]] = None,
+        **kwargs
+    ):
+        super(WorkspaceSku, self).__init__(**kwargs)
+        # Read-only attributes start as None and are filled on deserialization.
+        self.locations = None
+        self.location_info = None
+        self.tier = None
+        self.resource_type = None
+        self.name = None
+        self.capabilities = None
+        self.restrictions = restrictions
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+    """The parameters for updating a machine learning workspace.
+
+    All parameters are optional: this model is used as a PATCH body, so only
+    the properties that are set are sent to the service.
+
+    :param tags: A set of tags. The resource tags for the machine learning workspace.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :param identity: The identity of the resource.
+    :type identity: ~azure_machine_learning_workspaces.models.Identity
+    :param description: The description of this workspace.
+    :type description: str
+    :param friendly_name: The friendly name for this workspace.
+    :type friendly_name: str
+    :param image_build_compute: The compute name for image build.
+    :type image_build_compute: str
+    :param service_managed_resources_settings: The service managed resource settings.
+    :type service_managed_resources_settings:
+     ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+    :param primary_user_assigned_identity: The user assigned identity resource id that represents
+     the workspace identity.
+    :type primary_user_assigned_identity: str
+    """
+
+    # 'properties.*' keys are flattened into the nested "properties" object.
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'sku': {'key': 'sku', 'type': 'Sku'},
+        'identity': {'key': 'identity', 'type': 'Identity'},
+        'description': {'key': 'properties.description', 'type': 'str'},
+        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+        'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+        'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+        'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tags: Optional[Dict[str, str]] = None,
+        sku: Optional["Sku"] = None,
+        identity: Optional["Identity"] = None,
+        description: Optional[str] = None,
+        friendly_name: Optional[str] = None,
+        image_build_compute: Optional[str] = None,
+        service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+        primary_user_assigned_identity: Optional[str] = None,
+        **kwargs
+    ):
+        super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+        self.tags = tags
+        self.sku = sku
+        self.identity = identity
+        self.description = description
+        self.friendly_name = friendly_name
+        self.image_build_compute = image_build_compute
+        self.service_managed_resources_settings = service_managed_resources_settings
+        self.primary_user_assigned_identity = primary_user_assigned_identity
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
new file mode 100644
index 00000000000..5aa4d95e2b4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._compute_operations import ComputeOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._batch_endpoints_operations import BatchEndpointsOperations
+from ._batch_deployments_operations import BatchDeploymentsOperations
+from ._code_containers_operations import CodeContainersOperations
+from ._code_versions_operations import CodeVersionsOperations
+from ._data_containers_operations import DataContainersOperations
+from ._data_versions_operations import DataVersionsOperations
+from ._datastores_operations import DatastoresOperations
+from ._environment_containers_operations import EnvironmentContainersOperations
+from ._environment_specification_versions_operations import EnvironmentSpecificationVersionsOperations
+from ._jobs_operations import JobsOperations
+from ._labeling_jobs_operations import LabelingJobsOperations
+from ._model_containers_operations import ModelContainersOperations
+from ._model_versions_operations import ModelVersionsOperations
+from ._online_endpoints_operations import OnlineEndpointsOperations
+from ._online_deployments_operations import OnlineDeploymentsOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._workspace_skus_operations import WorkspaceSkusOperations
+
+# Public surface of the operations package; mirrors the imports above one-to-one.
+__all__ = [
+    'Operations',
+    'WorkspacesOperations',
+    'UsagesOperations',
+    'VirtualMachineSizesOperations',
+    'QuotasOperations',
+    'ComputeOperations',
+    'PrivateEndpointConnectionsOperations',
+    'PrivateLinkResourcesOperations',
+    'WorkspaceConnectionsOperations',
+    'BatchEndpointsOperations',
+    'BatchDeploymentsOperations',
+    'CodeContainersOperations',
+    'CodeVersionsOperations',
+    'DataContainersOperations',
+    'DataVersionsOperations',
+    'DatastoresOperations',
+    'EnvironmentContainersOperations',
+    'EnvironmentSpecificationVersionsOperations',
+    'JobsOperations',
+    'LabelingJobsOperations',
+    'ModelContainersOperations',
+    'ModelVersionsOperations',
+    'OnlineEndpointsOperations',
+    'OnlineDeploymentsOperations',
+    'WorkspaceFeaturesOperations',
+    'WorkspaceSkusOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_deployments_operations.py
new file mode 100644
index 00000000000..8e947b6d8b1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_deployments_operations.py
@@ -0,0 +1,440 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class BatchDeploymentsOperations(object):
+ """BatchDeploymentsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Store the shared pipeline client, configuration and (de)serializers
+        # supplied by the service client; this class is not built directly.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        endpoint_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skip=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.BatchDeploymentTrackedResourceArmPaginatedResult"]
+        """Lists Batch inference deployments in the workspace.
+
+        Lists Batch inference deployments in the workspace.
+
+        :param endpoint_name: Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Top of list.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchDeploymentTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: format the parameterized URL and attach the
+                # OData-style paging query parameters.
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters (optional ones only when supplied)
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: nextLink is already a fully-formed URL.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token, iterator of page items) as ItemPaged expects.
+            deserialized = self._deserialize('BatchDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments'}  # type: ignore
+
+    def delete(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete Batch Inference deployment.
+
+        Delete Batch Inference deployment.
+
+        :param endpoint_name: Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference deployment identifier.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 = deleted, 204 = did not exist; anything else is an error.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def get(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.BatchDeploymentTrackedResource"
+        """Gets a batch inference deployment by id.
+
+        Gets a batch inference deployment by id.
+
+        :param endpoint_name: Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: The identifier for the Batch deployments.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
    def update(
        self,
        endpoint_name,  # type: str
        deployment_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.PartialBatchDeploymentPartialTrackedResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.BatchDeploymentTrackedResource"
        """Update a batch inference deployment.

        Update a batch inference deployment.

        Sends an HTTP PATCH to the deployment resource with ``body`` serialized
        as JSON and returns the deployment as reported back by the service.

        :param endpoint_name: Inference endpoint name.
        :type endpoint_name: str
        :param deployment_name: The identifier for the Batch inference deployment.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Batch inference deployment definition object.
        :type body: ~azure_machine_learning_workspaces.models.PartialBatchDeploymentPartialTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BatchDeploymentTrackedResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchDeploymentTrackedResource"]
        # Map well-known failure status codes onto specific azure-core exception
        # types; callers may extend or override the mapping via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL. Endpoint and deployment names are validated client-side
        # against the service naming pattern before any request is sent.
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the partial resource and run the PATCH through the pipeline.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'PartialBatchDeploymentPartialTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a success for this operation; anything else is raised.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)

        if cls:
            # Hand the raw pipeline response and the deserialized model to the
            # caller-supplied callable instead of returning the model directly.
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
    def create_or_update(
        self,
        endpoint_name,  # type: str
        deployment_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.BatchDeploymentTrackedResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.BatchDeploymentTrackedResource"
        """Creates/updates a batch inference deployment.

        Creates/updates a batch inference deployment.

        Sends an HTTP PUT with the full deployment resource; the service
        replies 200 or 201, both of which carry the resulting resource.

        :param endpoint_name: Inference endpoint name.
        :type endpoint_name: str
        :param deployment_name: The identifier for the Batch inference deployment.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Batch inference deployment definition object.
        :type body: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BatchDeploymentTrackedResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.BatchDeploymentTrackedResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchDeploymentTrackedResource"]
        # Known failure codes map to specific exception types; overridable
        # through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL. Names are validated client-side against the service
        # naming pattern before the request goes out.
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'BatchDeploymentTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # NOTE(review): both branches deserialize the identical model; the split
        # is generator boilerplate for per-status response schemas.
        if response.status_code == 200:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_endpoints_operations.py
new file mode 100644
index 00000000000..deee54c1906
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_batch_endpoints_operations.py
@@ -0,0 +1,481 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class BatchEndpointsOperations(object):
    """BatchEndpointsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    Each method builds an ARM request for the
    ``Microsoft.MachineLearningServices/workspaces/.../batchEndpoints``
    resource, runs it through the client's pipeline, and deserializes the
    JSON response into the generated model types.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # All collaborators are injected by the generated service client that
        # owns this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        count=None,  # type: Optional[int]
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.BatchEndpointTrackedResourceArmPaginatedResult"]
        """Lists Batch inference endpoint in the workspace.

        Lists Batch inference endpoint in the workspace.

        Returns a lazy pager: no request is issued until iteration starts, and
        subsequent pages are fetched via the service-provided ``nextLink``.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param count: Number of endpoints to be retrieved in a page of results.
        :type count: int
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.BatchEndpointTrackedResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchEndpointTrackedResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first-page request (full URL + query string) or
            # a follow-up request against the opaque next_link URL.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters; optional paging knobs are only sent
                # when the caller supplied them.
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if count is not None:
                    query_parameters['count'] = self._serialize.query("count", count, 'int')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (continuation token, item iterator).
            deserialized = self._deserialize('BatchEndpointTrackedResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints'}  # type: ignore

    def delete(
        self,
        endpoint_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete Batch Inference Endpoint.

        Delete Batch Inference Endpoint.

        :param endpoint_name: Inference Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 ("already gone") is treated as success alongside 200.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'}  # type: ignore

    def get(
        self,
        endpoint_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.BatchEndpointTrackedResource"
        """Gets a batch inference endpoint by name.

        Gets a batch inference endpoint by name.

        :param endpoint_name: Name for the Batch Endpoint.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BatchEndpointTrackedResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchEndpointTrackedResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'}  # type: ignore

    def update(
        self,
        endpoint_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.PartialBatchEndpointPartialTrackedResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.BatchEndpointTrackedResource"
        """Update a batch inference endpoint.

        Update a batch inference endpoint.

        Sends an HTTP PATCH carrying the partial resource in ``body``.

        :param endpoint_name: Name for the Batch inference endpoint.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Mutable batch inference endpoint definition object.
        :type body: ~azure_machine_learning_workspaces.models.PartialBatchEndpointPartialTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BatchEndpointTrackedResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchEndpointTrackedResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL. The endpoint name is validated client-side against
        # the service naming pattern.
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'PartialBatchEndpointPartialTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'}  # type: ignore

    def create_or_update(
        self,
        endpoint_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.BatchEndpointTrackedResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.BatchEndpointTrackedResource"
        """Creates a batch inference endpoint.

        Creates a batch inference endpoint.

        Sends an HTTP PUT with the full endpoint resource; 200 and 201 are
        both success responses carrying the resulting resource.

        :param endpoint_name: Name for the Batch inference endpoint.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Batch inference endpoint definition object.
        :type body: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BatchEndpointTrackedResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.BatchEndpointTrackedResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.BatchEndpointTrackedResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'BatchEndpointTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # NOTE(review): both status branches deserialize the identical model;
        # the split is generator boilerplate for per-status response schemas.
        if response.status_code == 200:
            deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('BatchEndpointTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'}  # type: ignore

    def list_keys(
        self,
        endpoint_name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.EndpointAuthKeys"
        """Lists batch Inference Endpoint keys.

        Lists batch Inference Endpoint keys.

        Uses an HTTP POST (not GET) because the response contains secrets.

        :param endpoint_name: Inference Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EndpointAuthKeys, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.EndpointAuthKeys"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py
new file mode 100644
index 00000000000..9152d353e2b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class CodeContainersOperations(object):
    """CodeContainersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Alias so callers can reach the generated model classes through the
    # operation group instance (e.g. ``client.code_containers.models``).
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # AutoRest-generated plumbing: the pipeline client plus the shared
        # (de)serializer and configuration owned by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.CodeContainerResourceArmPaginatedResult"]
        """List containers.

        List containers.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.CodeContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResourceArmPaginatedResult"]
        # Map well-known HTTP failure codes to typed azure-core exceptions;
        # callers may extend or override the mapping via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Service API version is pinned by the code generator.
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation's metadata template.
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service returns a fully-formed
                # ``nextLink`` URL, so no additional query parameters are added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('CodeContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # Give the caller-supplied callback a chance to transform each page.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): the generated paging path deserializes the error body
                # before calling map_error; if map_error raises a mapped exception,
                # the deserialized model is not attached to it (matches AutoRest output).
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    # Metadata attached to the function object so prepare_request can read the URL template.
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the container was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.CodeContainerResource"
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore

    def create_or_update(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.CodeContainerResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.CodeContainerResource"
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Name is validated client-side against the service's naming pattern.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CodeContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 = updated existing resource, 201 = created new one; payload shape
        # is identical, so both branches deserialize the same model.
        if response.status_code == 200:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('CodeContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py
new file mode 100644
index 00000000000..311ad959eac
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_code_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class CodeVersionsOperations(object):
    """CodeVersionsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Alias so callers can reach the generated model classes through the
    # operation group instance (e.g. ``client.code_versions.models``).
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # AutoRest-generated plumbing: the pipeline client plus the shared
        # (de)serializer and configuration owned by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        order_by=None,  # type: Optional[str]
        top=None,  # type: Optional[int]
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.CodeVersionResourceArmPaginatedResult"]
        """List versions.

        List versions.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param order_by: Ordering of list.
        :type order_by: str
        :param top: Maximum number of records to return.
        :type top: int
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.CodeVersionResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResourceArmPaginatedResult"]
        # Map well-known HTTP failure codes to typed azure-core exceptions;
        # callers may extend or override the mapping via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Service API version is pinned by the code generator.
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation's metadata template.
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'name': self._serialize.url("name", name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters; optional OData-style filters are only
                # sent when the caller supplied them.
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if order_by is not None:
                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service returns a fully-formed
                # ``nextLink`` URL, so no additional query parameters are added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('CodeVersionResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # Give the caller-supplied callback a chance to transform each page.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): the generated paging path deserializes the error body
                # before calling map_error; if map_error raises a mapped exception,
                # the deserialized model is not attached to it (matches AutoRest output).
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    # Metadata attached to the function object so prepare_request can read the URL template.
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete version.

        Delete version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the version was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.CodeVersionResource"
        """Get version.

        Get version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore

    def create_or_update(
        self,
        name,  # type: str
        version,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.CodeVersionResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.CodeVersionResource"
        """Create or update version.

        Create or update version.

        :param name: Container name.
        :type name: str
        :param version: Version identifier.
        :type version: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Version entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CodeVersionResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.CodeVersionResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CodeVersionResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # Container name is validated client-side against the service's naming
            # pattern; the version segment is not pattern-restricted.
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'version': self._serialize.url("version", version, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CodeVersionResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 = updated existing resource, 201 = created new one; payload shape
        # is identical, so both branches deserialize the same model.
        if response.status_code == 200:
            deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('CodeVersionResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py
new file mode 100644
index 00000000000..32ed0cbaa9b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py
@@ -0,0 +1,1117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ComputeOperations(object):
+ """ComputeOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Store the pipeline client, configuration and (de)serializers; every
+        # operation method below uses these to build requests and decode responses.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.PaginatedComputeResourcesList"]
+        """Gets computes in specified workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.PaginatedComputeResourcesList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the operation metadata template.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link is a complete URL, so no extra
+                # query parameters are added.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and hand back (continuation token, elements).
+            deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # Custom cls hook may transform the page's element list.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'}  # type: ignore
+
+    def get(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+        not returned - use 'keys' nested resource to get them.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # Translate known status codes to typed exceptions, else raise ARM error.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        parameters,  # type: "models.ComputeResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Send the initial PUT request of the create_or_update long-running operation.
+
+        Returns the deserialized ComputeResource from a 200 or 201 response;
+        ``begin_create_or_update`` wraps this and polls the operation to completion.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ComputeResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if response.status_code == 201:
+            # 201 carries the Azure-AsyncOperation polling URL for the LRO.
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def begin_create_or_update(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        parameters,  # type: "models.ComputeResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ComputeResource"]
+        """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+        that it does not exist yet.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Payload with Machine Learning compute definition.
+        :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PUT. cls=lambda keeps the raw
+            # PipelineResponse so the poller can inspect status/headers itself.
+            raw_result = self._create_or_update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polled response into the user-facing result.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume a previously saved poller from its continuation token.
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def _update_initial(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        parameters,  # type: "models.ClusterUpdateParameters"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ComputeResource"
+        """Send the initial PATCH request of the update long-running operation.
+
+        Returns the deserialized ComputeResource from a 200 response;
+        ``begin_update`` wraps this and polls the operation to completion.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def begin_update(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        parameters,  # type: "models.ClusterUpdateParameters"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ComputeResource"]
+        """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+        nonrecoverable operation.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param parameters: Additional parameters for cluster update.
+        :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PATCH. cls=lambda keeps the raw
+            # PipelineResponse so the poller can inspect status/headers itself.
+            raw_result = self._update_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                parameters=parameters,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polled response into the user-facing result.
+            deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume a previously saved poller from its continuation token.
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def _delete_initial(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        underlying_resource_action,  # type: Union[str, "models.UnderlyingResourceAction"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Send the initial DELETE request of the delete long-running operation.
+
+        Returns no body; a 202 response carries the polling headers that
+        ``begin_delete`` uses to track the operation.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            # 202 (accepted) carries the async-operation and location polling URLs.
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def begin_delete(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        underlying_resource_action,  # type: Union[str, "models.UnderlyingResourceAction"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Deletes specified Machine Learning compute.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+         underlying compute from workspace if 'Detach'.
+        :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial DELETE. cls=lambda keeps the raw
+            # PipelineResponse so the poller can inspect status/headers itself.
+            raw_result = self._delete_initial(
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                compute_name=compute_name,
+                underlying_resource_action=underlying_resource_action,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete returns no body; only invoke the optional cls hook.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume a previously saved poller from its continuation token.
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}  # type: ignore
+
+    def list_nodes(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.AmlComputeNodesInformation"]
+        """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.AmlComputeNodesInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page uses POST against the listNodes action URL.
+                # Construct URL
+                url = self.list_nodes.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                    'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.post(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages follow next_link with GET, not POST.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Page elements live under 'nodes' (not the usual 'value') here.
+            deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+            list_of_elem = deserialized.nodes
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'}  # type: ignore
+
+    def list_keys(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ComputeSecrets"
+        """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param compute_name: Name of the Azure Machine Learning compute.
+        :type compute_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ComputeSecrets, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ComputeSecrets"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # listKeys is a POST action (secrets are never returned from GET).
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'}  # type: ignore
+
+ def _start_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._start_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def begin_start(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._start_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def _stop_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._stop_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def begin_stop(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._stop_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def restart(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
+
+ def update_schedules(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters=None, # type: Optional["models.ComputeSchedules"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Updates schedules of a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: The object for updating schedules of specified ComputeInstance.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeSchedules
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update_schedules.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if parameters is not None:
+ body_content = self._serialize.body(parameters, 'ComputeSchedules')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ update_schedules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateSchedules'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py
new file mode 100644
index 00000000000..c79da32dd96
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class DataContainersOperations(object):
    """DataContainersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure_machine_learning_workspaces.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Class-level alias so callers can reach the generated model types via the
    # operation group (e.g. ``client.data_containers.models.DataContainerResource``).
    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client            # pipeline client that builds and runs requests
        self._serialize = serializer     # msrest-style serializer for URL/query/header/body values
        self._deserialize = deserializer # msrest-style deserializer for response payloads
        self._config = config            # holds subscription_id and client settings

    def list(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.DataContainerResourceArmPaginatedResult"]
        """List containers.

        List containers.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DataContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Builds either the first-page request (from the URL template) or a
        # follow-up request from the service-provided next_link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is an absolute URL that already embeds the query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Turns one page response into (next_link, iterator-of-items) for ItemPaged.
        def extract_data(pipeline_response):
            deserialized = self._deserialize('DataContainerResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        # Fetches a page and raises on any non-200 status.
        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE: the error body is deserialized before map_error, which may
                # raise one of the mapped exception types without the model attached.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data'}  # type: ignore

    def delete(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete container.

        Delete container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the container was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore

    def get(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.DataContainerResource"
        """Get container.

        Get container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore

    def create_or_update(
        self,
        name,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body,  # type: "models.DataContainerResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.DataContainerResource"
        """Create or update container.

        Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Container entity to create or update.
        :type body: ~azure_machine_learning_workspaces.models.DataContainerResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerResource, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.DataContainerResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataContainerResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            # NOTE: the name pattern is enforced here but not on get/delete, matching
            # the service spec (only creation validates new names).
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'DataContainerResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the same resource payload;
        # the duplicated branches come from the code generator.
        if response.status_code == 200:
            deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('DataContainerResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py
new file mode 100644
index 00000000000..120f7aa15ce
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_data_versions_operations.py
@@ -0,0 +1,368 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DataVersionsOperations(object):
+    """DataVersionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models  # alias: generated model classes reachable via the op-group attribute
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skip=None,  # type: Optional[str]
+        tags=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.DataVersionResourceArmPaginatedResult"]
+        """List data versions.
+
+        List data versions.
+
+        :param name: Data name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2.
+        :type tags: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DataVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DataVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied overrides win
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if tags is not None:
+                    query_parameters['$tags'] = self._serialize.query("tags", tags, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('DataVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)  # (continuation link, page items)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete version.
+
+        Delete version.
+
+        :param name: Data container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.DataVersionResource"
+        """Get version.
+
+        Get version.
+
+        :param name: Data container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.DataVersionResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.DataVersionResource"
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Data container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DataVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DataVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DataVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py
new file mode 100644
index 00000000000..481095ea283
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_datastores_operations.py
@@ -0,0 +1,437 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DatastoresOperations(object):
+    """DatastoresOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models  # alias: generated model classes reachable via the op-group attribute
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        count=30,  # type: Optional[int]
+        is_default=None,  # type: Optional[bool]
+        names=None,  # type: Optional[List[str]]
+        search_text=None,  # type: Optional[str]
+        order_by=None,  # type: Optional[str]
+        order_by_asc=False,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.DatastorePropertiesResourceArmPaginatedResult"]
+        """List datastores.
+
+        List datastores.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param count: Maximum number of results to return (default: 30).
+        :type count: int
+        :param is_default: Filter down to the workspace default datastore.
+        :type is_default: bool
+        :param names: Names of datastores to return.
+        :type names: list[str]
+        :param search_text: Text to search for in the datastore names.
+        :type search_text: str
+        :param order_by: Order by property (createdtime | modifiedtime | name).
+        :type order_by: str
+        :param order_by_asc: Order by property in ascending order.
+        :type order_by_asc: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either DatastorePropertiesResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.DatastorePropertiesResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied overrides win
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+                if is_default is not None:
+                    query_parameters['isDefault'] = self._serialize.query("is_default", is_default, 'bool')
+                if names is not None:
+                    query_parameters['names'] = self._serialize.query("names", names, '[str]')
+                if search_text is not None:
+                    query_parameters['searchText'] = self._serialize.query("search_text", search_text, 'str')
+                if order_by is not None:
+                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if order_by_asc is not None:
+                    query_parameters['orderByAsc'] = self._serialize.query("order_by_asc", order_by_asc, 'bool')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('DatastorePropertiesResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)  # (continuation link, page items)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete datastore.
+
+        Delete datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.DatastorePropertiesResource"
+        """Get datastore.
+
+        Get datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.DatastorePropertiesResource"
+        skip_validation=False,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.DatastorePropertiesResource"
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Datastore entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :param skip_validation: Flag to skip validation.
+        :type skip_validation: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastorePropertiesResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastorePropertiesResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastorePropertiesResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        if skip_validation is not None:
+            query_parameters['skipValidation'] = self._serialize.query("skip_validation", skip_validation, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DatastorePropertiesResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('DatastorePropertiesResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'}  # type: ignore
+
+    def list_secrets(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.DatastoreSecrets"
+        """Get datastore secrets.
+
+        Get datastore secrets.
+
+        :param name: Datastore name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatastoreSecrets, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DatastoreSecrets
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatastoreSecrets"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_secrets.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)  # action endpoint: POST .../listSecrets
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatastoreSecrets', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py
new file mode 100644
index 00000000000..5ce6648894d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_containers_operations.py
@@ -0,0 +1,336 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentContainersOperations(object):
+    """EnvironmentContainersOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.EnvironmentContainerResourceArmPaginatedResult"]
+        """List containers.
+
+        List containers.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.EnvironmentContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"  # API version pinned by the code generator
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the metadata template and add query params.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # subsequent pages: the service returns a fully-formed URL, so no params are added
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)  # per-page custom deserialization hook supplied by the caller
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)  # parsed before map_error so the model is available if needed
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # both 200 and 204 are treated as successful deletion
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})  # custom response hook; no body to pass for delete
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentContainerResource"
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # custom response hook wins over the plain model
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.EnvironmentContainerResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentContainerResource"
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),  # client-side name validation applies only on create/update, not on get/delete
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:  # both branches deserialize the same model; kept separate by the generator
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py
new file mode 100644
index 00000000000..2126a011860
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_environment_specification_versions_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class EnvironmentSpecificationVersionsOperations(object):
+    """EnvironmentSpecificationVersionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skip=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        """List versions.
+
+        List versions.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either EnvironmentSpecificationVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"  # API version pinned by the code generator
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the URL from the metadata template and add query params.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link  # subsequent pages: the service returns a fully-formed URL, so no params are added
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)  # per-page custom deserialization hook supplied by the caller
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)  # parsed before map_error so the model is available if needed
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # both 200 and 204 are treated as successful deletion
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})  # custom response hook; no body to pass for delete
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentSpecificationVersionResource"
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # custom response hook wins over the plain model
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.EnvironmentSpecificationVersionResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.EnvironmentSpecificationVersionResource"
+        """Creates or updates an EnvironmentSpecificationVersion.
+
+        Creates or updates an EnvironmentSpecificationVersion.
+
+        :param name: Name of EnvironmentSpecificationVersion.
+        :type name: str
+        :param version: Version of EnvironmentSpecificationVersion.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Definition of EnvironmentSpecificationVersion.
+        :type body: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentSpecificationVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EnvironmentSpecificationVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.EnvironmentSpecificationVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))  # caller-supplied status->exception overrides take precedence
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),  # client-side name validation applies only on create/update, not on get/delete
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'EnvironmentSpecificationVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:  # both branches deserialize the same model; kept separate by the generator
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('EnvironmentSpecificationVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py
new file mode 100644
index 00000000000..e7e5e342e98
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_jobs_operations.py
@@ -0,0 +1,479 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class JobsOperations(object):
+ """JobsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wires the shared pipeline client, client configuration and the msrest
+        # serializer/deserializer pair into this operation group. Instances are
+        # created and attached by the service client, never by callers directly.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        job_type=None,  # type: Optional[str]
+        tags=None,  # type: Optional[str]
+        tag=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.JobBaseResourceArmPaginatedResult"]
+        """Lists Jobs in the workspace.
+
+        Lists Jobs in the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param job_type: Type of job to be returned.
+        :type job_type: str
+        :param tags: Tags for job to be returned.
+        :type tags: str
+        :param tag: Jobs returned will have this tag key.
+        :type tag: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.JobBaseResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Builds the GET request for one page: the first page formats the
+        # parameterized URL and query string; subsequent pages reuse the
+        # service-supplied next_link verbatim (it already embeds the query).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters; optional filters are only sent when provided.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if job_type is not None:
+                    query_parameters['jobType'] = self._serialize.query("job_type", job_type, 'str')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if tag is not None:
+                    query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Unpacks one deserialized page into (next_link, iterator of items).
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('JobBaseResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        # Sends a single page request and validates the status code.
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE: generated pattern deserializes the error body before
+                # map_error may raise a mapped exception type for this status.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs'}  # type: ignore
+
+    def _delete_initial(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        # Initial DELETE call backing the begin_delete long-running operation.
+        # Accepts 200/202/204; a 202 response carries the async-operation
+        # headers captured below, which the LRO poller uses to track progress.
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Polling metadata is only present on the asynchronous (202) path.
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def begin_delete(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Deletes a Job (asynchronous).
+
+        Deletes a Job (asynchronous).
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial DELETE when not resuming from a saved poller
+        # state; cls=lambda keeps the raw PipelineResponse for the poller.
+        if cont_token is None:
+            raw_result = self._delete_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Final state is read from the Location header per the service contract.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def get(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.JobBaseResource"
+        """Gets a Job by name/id.
+
+        Gets a Job by name/id.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Simple synchronous GET; only 200 is a success status for this call.
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.JobBaseResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.JobBaseResource"
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Job definition object.
+        :type body: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBaseResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.JobBaseResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.JobBaseResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL. Unlike get/delete, the PUT validates the job id
+        # against the service naming pattern before sending the request.
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'JobBaseResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) both return the same resource shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('JobBaseResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'}  # type: ignore
+
+    def cancel(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Cancels a Job.
+
+        Cancels a Job.
+
+        :param id: The name and identifier for the Job.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL for the POST-style action endpoint (".../jobs/{id}/cancel").
+        url = self.cancel.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # POST with no request body; success returns no payload.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py
new file mode 100644
index 00000000000..7affee648b7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_labeling_jobs_operations.py
@@ -0,0 +1,755 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class LabelingJobsOperations(object):
+ """LabelingJobsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wires the shared pipeline client, client configuration and the msrest
+        # serializer/deserializer pair into this operation group. Instances are
+        # created and attached by the service client, never by callers directly.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        count=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.LabelingJobResourceArmPaginatedResult"]
+        """Lists labeling jobs in the workspace.
+
+        Lists labeling jobs in the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param count: Number of labeling jobs to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.LabelingJobResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Builds the GET request for one page: the first page formats the
+        # parameterized URL and query string; subsequent pages reuse the
+        # service-supplied next_link verbatim (it already embeds the query).
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters; optional filters are only sent when provided.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Unpacks one deserialized page into (next_link, iterator of items).
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('LabelingJobResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        # Sends a single page request and validates the status code.
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs'}  # type: ignore
+
+    def delete(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a labeling job.
+
+        Delete a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Synchronous delete (no LRO, unlike JobsOperations.begin_delete):
+        # 200 and 204 both count as success and carry no payload.
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def get(
+        self,
+        id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        include_job_instructions=None,  # type: Optional[bool]
+        include_label_categories=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LabelingJobResource"
+        """Gets a labeling job by name/id.
+
+        Gets a labeling job by name/id.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param include_job_instructions: Boolean value to indicate whether to include JobInstructions
+         in response.
+        :type include_job_instructions: bool
+        :param include_label_categories: Boolean value to indicate Whether to include LabelCategories
+         in response.
+        :type include_label_categories: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LabelingJobResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters; the two include flags are opt-in expansions of
+        # the response payload and are omitted from the query when None.
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        if include_job_instructions is not None:
+            query_parameters['includeJobInstructions'] = self._serialize.query("include_job_instructions", include_job_instructions, 'bool')
+        if include_label_categories is not None:
+            query_parameters['includeLabelCategories'] = self._serialize.query("include_label_categories", include_label_categories, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'}  # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.LabelingJobResource"
+        **kwargs # type: Any
+    ):  # Initial PUT of the create-or-update long-running operation; polled by begin_create_or_update.
+        # type: (...) -> "models.LabelingJobResource"
+        cls = kwargs.pop('cls', None) # type: ClsType["models.LabelingJobResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL; 'id' is validated against the labeling-job naming pattern before substitution.
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers: JSON request body, JSON response expected.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'LabelingJobResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}  # 201 carries async-operation headers consumed by the poller
+        if response.status_code == 200:
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if response.status_code == 201:
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'} # type: ignore
+
+    def begin_create_or_update(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.LabelingJobResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.LabelingJobResource"]
+        """Creates or updates a labeling job (asynchronous).
+
+        Creates or updates a labeling job (asynchronous).
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: LabelingJob definition object.
+        :type body: ~azure_machine_learning_workspaces.models.LabelingJobResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either LabelingJobResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.LabelingJobResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.LabelingJobResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._create_or_update_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):  # deserializes the terminal LRO response
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('LabelingJobResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from Azure-AsyncOperation header
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'} # type: ignore
+
+    def _export_labels_initial(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.ExportSummary"
+        **kwargs # type: Any
+    ):  # Initial POST of the export-labels long-running operation; polled by begin_export_labels.
+        # type: (...) -> Optional["models.ExportSummary"]
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ExportSummary"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL from validated path parameters.
+        url = self._export_labels_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers: JSON request body, JSON response expected.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ExportSummary')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None  # 202 Accepted has no body; the poller follows the Location header
+        if response.status_code == 200:
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _export_labels_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'} # type: ignore
+
+    def begin_export_labels(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.ExportSummary"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.ExportSummary"]
+        """Export labels from a labeling job (asynchronous).
+
+        Export labels from a labeling job (asynchronous).
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The export summary.
+        :type body: ~azure_machine_learning_workspaces.models.ExportSummary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either ExportSummary or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ExportSummary]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ExportSummary"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._export_labels_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):  # deserializes the terminal LRO response
+            deserialized = self._deserialize('ExportSummary', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from the Location header
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_export_labels.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'} # type: ignore
+
+    def pause(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Pause a labeling job.
+
+        Pause a labeling job.
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from validated path parameters.
+        url = self.pause.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers; no request body for this POST.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    pause.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause'} # type: ignore
+
+    def _resume_initial(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):  # Initial POST of the resume long-running operation; polled by begin_resume.
+        # type: (...) -> None
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from validated path parameters.
+        url = self._resume_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers; no request body for this POST.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:  # accepted: poller follows the Location header
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _resume_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'} # type: ignore
+
+    def begin_resume(
+        self,
+        id, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Resume a labeling job (asynchronous).
+
+        Resume a labeling job (asynchronous).
+
+        :param id: The name and identifier for the LabelingJob.
+        :type id: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._resume_initial(
+                id=id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):  # LRO produces no body; result is None
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            'id': self._serialize.url("id", id, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)  # final state read from the Location header
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_resume.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py
new file mode 100644
index 00000000000..f52ed625542
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_containers_operations.py
@@ -0,0 +1,341 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelContainersOperations(object):
+ """ModelContainersOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client  # pipeline client used to build and send requests
+        self._serialize = serializer  # request model serializer
+        self._deserialize = deserializer  # response model deserializer
+        self._config = config  # client configuration (subscription_id, polling_interval, ...)
+
+    def list(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        skip=None, # type: Optional[str]
+        count=None, # type: Optional[int]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.ModelContainerResourceArmPaginatedResult"]
+        """List model containers.
+
+        List model containers.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param count: Maximum number of results to return.
+        :type count: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ModelContainerResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):  # builds the request for the first or a subsequent page
+            # Construct headers; identical for first and follow-up page requests.
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL from validated path parameters (first page only).
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct query parameters; next_link already embeds its own.
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if count is not None:
+                    query_parameters['count'] = self._serialize.query("count", count, 'int')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):  # splits a page into (next link, item iterator)
+            deserialized = self._deserialize('ModelContainerResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):  # fetches one page and maps error responses
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'} # type: ignore
+
+    def delete(
+        self,
+        name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Delete container.
+
+        Delete container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from validated path parameters.
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers; no request body for DELETE.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:  # 204: container already absent
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+    def get(
+        self,
+        name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.ModelContainerResource"
+        """Get container.
+
+        Get container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.ModelContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL from validated path parameters.
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct query parameters (api-version only).
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers; no request body for GET.
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ModelContainerResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelContainerResource"
+        """Create or update container.
+
+        Create or update container.
+
+        :param name: Container name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Container entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelContainerResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelContainerResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelContainerResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        # Callers may override the request content type via kwargs.
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Name is validated client-side only on PUT: alphanumeric start, then
+            # alphanumerics/hyphen/underscore, max 255 chars total (raises on mismatch).
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ModelContainerResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) both carry the same resource shape; the
+        # duplicated branches are generator boilerplate.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ModelContainerResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py
new file mode 100644
index 00000000000..e38931c2aee
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_model_versions_operations.py
@@ -0,0 +1,389 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelVersionsOperations(object):
+    """ModelVersionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wiring is done by the generated service client; no I/O happens here.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        skip=None,  # type: Optional[str]
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        version=None,  # type: Optional[str]
+        description=None,  # type: Optional[str]
+        offset=None,  # type: Optional[int]
+        tags=None,  # type: Optional[str]
+        properties=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.ModelVersionResourceArmPaginatedResult"]
+        """List model versions.
+
+        List model versions.
+
+        :param name: Model name.
+        :type name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Maximum number of records to return.
+        :type top: int
+        :param version: Model version.
+        :type version: str
+        :param description: Model description.
+        :type description: str
+        :param offset: Number of initial results to skip.
+        :type offset: int
+        :param tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2.
+        :type tags: str
+        :param properties: Comma-separated list of property names (and optionally values). Example:
+         prop1,prop2=value2.
+        :type properties: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ModelVersionResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page: build the full filtered URL. Subsequent pages: follow the
+            # server-supplied next_link verbatim (it already embeds all query params).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'name': self._serialize.url("name", name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # Optional filters are only sent when explicitly provided.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if version is not None:
+                    query_parameters['version'] = self._serialize.query("version", version, 'str')
+                if description is not None:
+                    query_parameters['description'] = self._serialize.query("description", description, 'str')
+                if offset is not None:
+                    query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
+                if tags is not None:
+                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+                if properties is not None:
+                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token or None, iterator over this page's items).
+            deserialized = self._deserialize('ModelVersionResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): here the error body is deserialized *before* map_error,
+                # so mapped 401/404/409 raise without the model attached — opposite
+                # order from the non-paging operations in this file.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazy pager: no request is made until iteration begins.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions'}  # type: ignore
+
+    def delete(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete version.
+
+        Delete version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Both 200 and 204 are treated as success; no body is deserialized.
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
+
+    def get(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelVersionResource"
+        """Get version.
+
+        Get version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'name': self._serialize.url("name", name, 'str'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        name,  # type: str
+        version,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body,  # type: "models.ModelVersionResource"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ModelVersionResource"
+        """Create or update version.
+
+        Create or update version.
+
+        :param name: Container name.
+        :type name: str
+        :param version: Version identifier.
+        :type version: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Version entity to create or update.
+        :type body: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelVersionResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.ModelVersionResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ModelVersionResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            # Client-side name validation applies only on PUT (same pattern as the
+            # container-level create_or_update); 'version' is unconstrained.
+            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'version': self._serialize.url("version", version, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'ModelVersionResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # 200 (updated) and 201 (created) deserialize identically; generator boilerplate.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ModelVersionResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py
new file mode 100644
index 00000000000..8ccc0acdbf5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_deployments_operations.py
@@ -0,0 +1,731 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineDeploymentsOperations(object):
+ """OnlineDeploymentsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+    def list(
+        self,
+        endpoint_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        order_by=None,  # type: Optional[str]
+        top=None,  # type: Optional[int]
+        skip=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+        """List Inference Endpoint Deployments.
+
+        List Inference Endpoint Deployments.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param order_by: Ordering of list.
+        :type order_by: str
+        :param top: Top of list.
+        :type top: int
+        :param skip: Continuation token for pagination.
+        :type skip: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResourceArmPaginatedResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineDeploymentTrackedResourceArmPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page builds the filtered URL; later pages follow next_link verbatim.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # Optional paging/ordering params are only sent when provided.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+                if order_by is not None:
+                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
+                if top is not None:
+                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
+                if skip is not None:
+                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token or None, iterator over this page's items).
+            deserialized = self._deserialize('OnlineDeploymentTrackedResourceArmPaginatedResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): error body deserialized before map_error — mapped
+                # 401/404/409 raise without the model attached (differs from non-paging ops).
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazy pager: no HTTP request until iteration starts.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments'}  # type: ignore
+
+    def _delete_initial(
+        self,
+        endpoint_name,  # type: str
+        deployment_name,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        # Initial DELETE call of the long-running delete operation; invoked by
+        # begin_delete, which wraps the result in an LROPoller.
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._delete_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200/204: completed synchronously; 202: accepted, deletion continues
+        # asynchronously and the poller follows the headers captured below.
+        if response.status_code not in [200, 202, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            # Polling metadata for the LRO: overall timeout, status URL, and retry delay.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'}  # type: ignore
+
+    def begin_delete(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Delete Inference Endpoint Deployment (asynchronous).
+
+        Starts a long-running delete of an online deployment and returns a poller
+        that completes when the service-side operation reaches a terminal state.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved poller state: issue the initial DELETE request.
+            # cls=lambda passes the raw pipeline response through unchanged so the
+            # poller can inspect the status code and polling headers itself.
+            raw_result = self._delete_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial request and must not be
+        # forwarded to the subsequent polling calls.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Delete returns no body; only honor a user-supplied callback.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Re-supplied so ARMPolling can format the polling URL it follows.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # The terminal state of this operation is reported via the Location header.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def get(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.OnlineDeploymentTrackedResource"
+        """Get Inference Endpoint Deployment.
+
+        Retrieve a single online deployment under the given endpoint.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: OnlineDeploymentTrackedResource, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        # Map auth/404/409 status codes to azure-core exceptions; callers may
+        # extend or override the mapping via the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is a documented success status for this GET.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def _update_initial(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineDeploymentPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Optional["models.OnlineDeploymentTrackedResource"]
+        """Send the initial PATCH request for :meth:`begin_update`.
+
+        Returns the deserialized resource when the service replies 200
+        synchronously, or ``None`` when the update was accepted for
+        asynchronous processing (202, polling headers only).
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OnlineDeploymentTrackedResource"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'PartialOnlineDeploymentPartialTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        deserialized = None
+        if response.status_code == 200:
+            # Update completed synchronously; the body is the updated resource.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 202:
+            # Accepted asynchronously: capture the polling headers, no body.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def begin_update(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.PartialOnlineDeploymentPartialTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineDeploymentTrackedResource"]
+        """Update Online Deployment (asynchronous).
+
+        Starts a long-running PATCH of an online deployment and returns a poller
+        whose result is the updated resource.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Online Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineDeploymentPartialTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved poller state: issue the initial PATCH. cls=lambda passes
+            # the raw pipeline response through so the poller can read status
+            # and headers itself.
+            raw_result = self._update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Consumed by the initial request; must not leak into the polling calls.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # The final GET during polling returns the updated resource body.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        # Re-supplied (with the same name-pattern validation as the initial
+        # request) so ARMPolling can format the polling URL it follows.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # The terminal state of this operation is reported via the Location header.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def _create_or_update_initial(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.OnlineDeploymentTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.OnlineDeploymentTrackedResource"
+        """Send the initial PUT request for :meth:`begin_create_or_update`.
+
+        Both success codes (200 updated, 201 created) carry the resource body;
+        a 201 additionally carries the Azure-AsyncOperation polling headers.
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_or_update_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'OnlineDeploymentTrackedResource')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 200:
+            # Existing deployment updated in place.
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if response.status_code == 201:
+            # New deployment created; capture the async-operation polling headers.
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def begin_create_or_update(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.OnlineDeploymentTrackedResource"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["models.OnlineDeploymentTrackedResource"]
+        """Create or update Inference Endpoint Deployment (asynchronous).
+
+        Starts a long-running PUT of an online deployment and returns a poller
+        whose result is the created or updated resource.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: Inference Endpoint entity to apply during operation.
+        :type body: ~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either OnlineDeploymentTrackedResource or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineDeploymentTrackedResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineDeploymentTrackedResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # No saved poller state: issue the initial PUT. cls=lambda passes
+            # the raw pipeline response through so the poller can read status
+            # and headers itself.
+            raw_result = self._create_or_update_initial(
+                endpoint_name=endpoint_name,
+                deployment_name=deployment_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Consumed by the initial request; must not leak into the polling calls.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Extract the async-operation headers from the final response and
+            # deserialize the resource body.
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
+            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+            deserialized = self._deserialize('OnlineDeploymentTrackedResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        # Re-supplied (with the same name-pattern validation as the initial
+        # request) so ARMPolling can format the polling URL it follows.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        # Unlike delete/update, the terminal state here is reported via the
+        # Azure-AsyncOperation header.
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} # type: ignore
+
+    def get_logs(
+        self,
+        endpoint_name, # type: str
+        deployment_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.DeploymentLogsRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.DeploymentLogs"
+        """Retrieve logs for an Inference Endpoint Deployment.
+
+        POSTs a log request to the deployment's ``getLogs`` action and returns
+        the resulting logs.
+
+        :param endpoint_name: Inference endpoint name.
+        :type endpoint_name: str
+        :param deployment_name: Inference Endpoint Deployment name.
+        :type deployment_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: The request containing parameters for retrieving logs.
+        :type body: ~azure_machine_learning_workspaces.models.DeploymentLogsRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeploymentLogs, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.DeploymentLogs
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.DeploymentLogs"]
+        # Map auth/404/409 status codes to azure-core exceptions; callers may
+        # extend or override the mapping via the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_logs.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'DeploymentLogsRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is a documented success status for this action.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeploymentLogs', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py
new file mode 100644
index 00000000000..f031191b4c6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_online_endpoints_operations.py
@@ -0,0 +1,914 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OnlineEndpointsOperations(object):
+ """OnlineEndpointsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
    def __init__(self, client, config, serializer, deserializer):
        # Wire up the shared ARM pipeline client, the client configuration
        # (subscription id, polling interval, ...) and the msrest
        # serializer/deserializer pair used by every operation below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
    def list(
        self,
        resource_group_name, # type: str
        workspace_name, # type: str
        name=None, # type: Optional[str]
        count=None, # type: Optional[int]
        compute_type=None, # type: Optional[Union[str, "models.EndpointComputeType"]]
        skip=None, # type: Optional[str]
        tags=None, # type: Optional[str]
        properties=None, # type: Optional[str]
        order_by=None, # type: Optional[Union[str, "models.OrderString"]]
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
        """List Online Endpoints.

        List Online Endpoints.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param name: Name of the endpoint.
        :type name: str
        :param count: Number of endpoints to be retrieved in a page of results.
        :type count: int
        :param compute_type: EndpointComputeType to be filtered by.
        :type compute_type: str or ~azure_machine_learning_workspaces.models.EndpointComputeType
        :param skip: Continuation token for pagination.
        :type skip: str
        :param tags: A set of tags with which to filter the returned models. It is a comma separated
         string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .
        :type tags: str
        :param properties: A set of properties with which to filter the returned models. It is a comma
         separated string of properties key and/or properties key=value Example:
         propKey1,propKey2,propKey3=value3 .
        :type properties: str
        :param order_by: The option to order the response.
        :type order_by: str or ~azure_machine_learning_workspaces.models.OrderString
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Callers may extend/override the default status-code -> exception mapping.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first-page request (URL template + full query
            # string) or a follow-up request that reuses the service-provided
            # next_link URL verbatim (it already embeds the query parameters).

            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters; optional filters are only sent when supplied.
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if name is not None:
                    query_parameters['name'] = self._serialize.query("name", name, 'str')
                if count is not None:
                    query_parameters['count'] = self._serialize.query("count", count, 'int')
                if compute_type is not None:
                    query_parameters['computeType'] = self._serialize.query("compute_type", compute_type, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
                if tags is not None:
                    query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
                if properties is not None:
                    query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
                if order_by is not None:
                    query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (continuation token, item iterator)
            # as required by ItemPaged.
            deserialized = self._deserialize('OnlineEndpointTrackedResourceArmPaginatedResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page; raise an ARM-formatted error on any non-200.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints'}  # type: ignore
+
    def _delete_initial(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE call of the delete long-running operation.

        A 202 response carries the async-operation headers that the poller
        created by :meth:`begin_delete` uses to track completion; 200/204 mean
        the delete finished synchronously (or the endpoint was already gone).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # Async path: expose the polling headers to the caller/poller.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, None, response_headers)

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    def begin_delete(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Delete Online Endpoint (asynchronous).

        Delete Online Endpoint (asynchronous).

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Kick off the LRO. cls=lambda keeps the raw PipelineResponse so
            # the poller can read the async-operation headers from it.
            raw_result = self._delete_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These were consumed by the initial call; strip them so they are not
        # forwarded again to the polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete has no body; only invoke the caller's transform, if any.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # Final state is read from the Location header for this operation.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+ def get(
+ self,
+ endpoint_name, # type: str
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.OnlineEndpointTrackedResource"
+ """Get Online Endpoint.
+
+ Get Online Endpoint.
+
+ :param endpoint_name: Online Endpoint name.
+ :type endpoint_name: str
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OnlineEndpointTrackedResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OnlineEndpointTrackedResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} # type: ignore
+
    def _update_initial(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        body, # type: "models.PartialOnlineEndpointPartialTrackedResource"
        **kwargs # type: Any
    ):
        # type: (...) -> Optional["models.OnlineEndpointTrackedResource"]
        """Issue the initial PATCH call of the update long-running operation.

        Returns the updated resource on 200; on 202 the body is empty and only
        the async-operation headers are surfaced (deserialized stays None).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.OnlineEndpointTrackedResource"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the partial-update payload and send it as a PATCH.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'PartialOnlineEndpointPartialTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        deserialized = None
        if response.status_code == 200:
            # Synchronous completion: the updated resource is in the body.
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

        if response.status_code == 202:
            # Async path: expose the polling headers; no body to deserialize.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized
    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    def begin_update(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        body, # type: "models.PartialOnlineEndpointPartialTrackedResource"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["models.OnlineEndpointTrackedResource"]
        """Update Online Endpoint (asynchronous).

        Update Online Endpoint (asynchronous).

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Online Endpoint entity to apply during operation.
        :type body: ~azure_machine_learning_workspaces.models.PartialOnlineEndpointPartialTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Kick off the LRO. cls=lambda keeps the raw PipelineResponse so
            # the poller can read the async-operation headers from it.
            raw_result = self._update_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # Consumed by the initial call; must not be forwarded to the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final resource once the operation completes.
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # Final state is read from the Location header for this operation.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    def _create_or_update_initial(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        body, # type: "models.OnlineEndpointTrackedResource"
        **kwargs # type: Any
    ):
        # type: (...) -> "models.OnlineEndpointTrackedResource"
        """Issue the initial PUT call of the create-or-update long-running operation.

        Both 200 (updated) and 201 (created) return the resource body; a 201
        additionally carries the Azure-AsyncOperation polling headers.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL. Note: unlike the other operations, create validates
        # the endpoint name against the service naming pattern client-side.
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the full resource payload and send it as a PUT.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'OnlineEndpointTrackedResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 200:
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

        if response.status_code == 201:
            # Created asynchronously: surface the async-operation headers too.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
    def begin_create_or_update(
        self,
        endpoint_name, # type: str
        resource_group_name, # type: str
        workspace_name, # type: str
        body, # type: "models.OnlineEndpointTrackedResource"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["models.OnlineEndpointTrackedResource"]
        """Create or update Online Endpoint (asynchronous).

        Create or update Online Endpoint (asynchronous).

        :param endpoint_name: Online Endpoint name.
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Online Endpoint entity to apply during operation.
        :type body: ~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either OnlineEndpointTrackedResource or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.OnlineEndpointTrackedResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OnlineEndpointTrackedResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Kick off the LRO. cls=lambda keeps the raw PipelineResponse so
            # the poller can read the async-operation headers from it.
            raw_result = self._create_or_update_initial(
                endpoint_name=endpoint_name,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                body=body,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # Consumed by the initial call; must not be forwarded to the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Extract polling headers and the final resource from the
            # terminal response of the operation.
            response_headers = {}
            response = pipeline_response.http_response
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            deserialized = self._deserialize('OnlineEndpointTrackedResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, response_headers)
            return deserialized

        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # Final state is tracked via the Azure-AsyncOperation header here,
        # unlike delete/update which use the Location header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'}  # type: ignore
+
+    def list_keys(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.EndpointAuthKeys"
+        """List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        List EndpointAuthKeys for an Endpoint using Key-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthKeys, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthKeys
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthKeys"]
+        # Map auth/not-found/conflict status codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_keys.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # NOTE(review): issued as POST (no body) — presumably the ARM "list secrets"
+        # pattern so key material is never exposed on a GET; confirm against the REST spec.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Unmapped error code: surface the service's typed ErrorResponse with ARM formatting.
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthKeys', pipeline_response)
+
+        # Custom response hook (cls) takes precedence over returning the model directly.
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys'} # type: ignore
+
+    def _regenerate_keys_initial(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.RegenerateEndpointKeysRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Initial (non-polling) request of the regenerateKeys long-running operation.
+
+        Serializes ``body`` and POSTs it once; ``begin_regenerate_keys`` wraps this call
+        in an LROPoller. A 202 response carries ``Location``/``Retry-After`` headers,
+        which are deserialized here so the poller can drive the operation to completion.
+        Returns ``None`` (or the result of ``cls`` when supplied).
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._regenerate_keys_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(body, 'RegenerateEndpointKeysRequest')
+        body_content_kwargs['content'] = body_content
+        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 = completed synchronously; 202 = accepted, poll via the Location header.
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _regenerate_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} # type: ignore
+
+    def begin_regenerate_keys(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        body, # type: "models.RegenerateEndpointKeysRequest"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller[None]
+        """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous).
+
+        Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous).
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param body: RegenerateKeys request .
+        :type body: ~azure_machine_learning_workspaces.models.RegenerateEndpointKeysRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        # Only issue the initial request when not resuming from a saved poller state.
+        if cont_token is None:
+            raw_result = self._regenerate_keys_initial(
+                endpoint_name=endpoint_name,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                body=body,
+                cls=lambda x,y,z: x,  # capture the raw pipeline response for the poller
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; remove them so they are
+        # not forwarded to the polling method below.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Operation has no response body; cls (if given) still sees the final response.
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        # Supplied to ARMPolling so it can format the final-state URL
+        # ('final-state-via': 'location') with the same path values as the request.
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} # type: ignore
+
+    def get_token(
+        self,
+        endpoint_name, # type: str
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.EndpointAuthToken"
+        """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication.
+
+        :param endpoint_name: Online Endpoint name.
+        :type endpoint_name: str
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EndpointAuthToken, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.EndpointAuthToken
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.EndpointAuthToken"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_token.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # NOTE(review): bodiless POST to the .../token action — presumably POST (not GET)
+        # so the issued credential stays out of URL/access logs; confirm against the REST spec.
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('EndpointAuthToken', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
new file mode 100644
index 00000000000..3da89c697a8
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+    """Operations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client, msrest (de)serializers and client configuration are
+        # injected by the generated service client that owns this operation group.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.OperationListResult"]
+        """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either OperationListResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is used verbatim (it already embeds its own query string).
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('OperationListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            # First tuple element is the continuation token; None here, so paging
+            # stops after the first page (the list result exposes no next link).
+            return None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the typed error is deserialized before map_error here,
+                # unlike the non-paged operations; for mapped status codes map_error
+                # raises first and the deserialized model goes unused.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazy pager: no request is sent until the returned iterable is consumed.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..6d5bcaba699
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,322 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations(object):
+    """PrivateEndpointConnectionsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure_machine_learning_workspaces.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client, msrest (de)serializers and client configuration are
+        # injected by the generated service client that owns this operation group.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["models.PrivateEndpointConnectionListResult"]
+        """List all the private endpoint connections associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PrivateEndpointConnectionListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is used verbatim (it already embeds its own query string).
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            # First tuple element is the continuation token; None here, so paging
+            # stops after the first page (the list result exposes no next link).
+            return None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): error model is deserialized before map_error here,
+                # unlike the non-paged operations; for mapped status codes map_error
+                # raises first and the deserialized model goes unused.
+                error = self._deserialize(models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        # Lazy pager: no request is sent until the returned iterable is consumed.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore
+
+    def get(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        private_endpoint_connection_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.PrivateEndpointConnection"
+        """Gets the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+    def create_or_update(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        private_endpoint_connection_name, # type: str
+        properties, # type: "models.PrivateEndpointConnection"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "models.PrivateEndpointConnection"
+        """Update the state of specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :param properties: The private endpoint connection properties.
+        :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PrivateEndpointConnection, or the result of cls(response)
+        :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Full-resource PUT: the serialized PrivateEndpointConnection is the request body.
+        body_content_kwargs = {} # type: Dict[str, Any]
+        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+    def delete(
+        self,
+        resource_group_name, # type: str
+        workspace_name, # type: str
+        private_endpoint_connection_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection associated
+         with the workspace.
+        :type private_endpoint_connection_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 = already gone; treated as success alongside 200 (idempotent delete).
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize(models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..e9e4f6776f1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations(object):
+ """PrivateLinkResourcesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateLinkResourceListResult"
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
new file mode 100644
index 00000000000..cd7eb542304
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
@@ -0,0 +1,182 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations(object):
+ """QuotasOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def update(
+ self,
+ location, # type: str
+ parameters, # type: "models.QuotaUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.UpdateWorkspaceQuotasResult"
+ """Update quota for each VM family in workspace.
+
+ :param location: The location for which the quota update is queried.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListWorkspaceQuotas"]
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which workspace quotas are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
new file mode 100644
index 00000000000..4d47798227a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+ """UsagesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListUsagesResult"]
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..481e3f27479
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations(object):
+ """VirtualMachineSizesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.VirtualMachineSizeListResult"
+ """Returns supported VM Sizes in a location.
+
+ :param location: The location for which supported virtual machine sizes are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..23c2d4b0a2d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations(object):
+ """WorkspaceConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ target=None, # type: Optional[str]
+ category=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"]
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ def create(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ parameters, # type: "models.WorkspaceConnection"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Add a new workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ # Pop 'cls' so it is not forwarded into the pipeline kwargs below.
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ # Default mapping of ARM status codes to typed exceptions; callers can
+ # extend or override it via the 'error_map' keyword argument.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # Remaining **kwargs (per-call pipeline options) flow into the run call.
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # map_error raises a typed exception for mapped codes; anything else
+ # falls through to a generic HttpResponseError carrying the ARM error body.
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ # A custom 'cls' also receives the raw pipeline response.
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ # Pop 'cls' so it is not forwarded into the pipeline kwargs below.
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ # Default ARM status-code -> exception mapping, extendable via 'error_map'.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 204 means the connection did not exist / nothing to delete; both 200 and
+ # 204 are treated as success.
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ # No body on success; only invoke a custom 'cls' hook if provided.
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..424da26dcbc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations(object):
+ """WorkspaceFeaturesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client  # PipelineClient used to build and send requests
+ self._serialize = serializer  # serializer for URL/query/header values
+ self._deserialize = deserializer  # deserializer for response models
+ self._config = config  # client configuration (holds subscription_id)
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListAmlUserFeatureResult"]
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Build either the first-page request (templated URL + api-version) or a
+ # follow-up request from the service-provided next_link, which already
+ # embeds its own query string (hence the empty query_parameters there).
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserialize one page and return (continuation link, item iterator).
+ # A custom 'cls' is applied per page to the list of elements.
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ # Fetch a single page; raises a typed error (or generic HttpResponseError
+ # with the deserialized ARM error body) on any non-200 status.
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ # ItemPaged lazily drives get_next/extract_data as the caller iterates.
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py
new file mode 100644
index 00000000000..d515fa93ab1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py
@@ -0,0 +1,114 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceSkusOperations(object):
+ """WorkspaceSkusOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client  # PipelineClient used to build and send requests
+ self._serialize = serializer  # serializer for URL/query/header values
+ self._deserialize = deserializer  # deserializer for response models
+ self._config = config  # client configuration (holds subscription_id)
+
+ def list(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.SkuListResult"]
+ """Lists all skus with associated features.
+
+ This is a subscription-level operation (no resource group or workspace).
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # First page: templated URL + api-version. Later pages: the service's
+ # next_link, used verbatim (it already carries its query string).
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserialize one page and return (continuation link, item iterator);
+ # a custom 'cls' transforms the page's element list.
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ # Fetch one page; non-200 raises a typed or generic ARM error.
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..ae42435dd95
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
@@ -0,0 +1,1041 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations(object):
+ """WorkspacesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client  # PipelineClient used to build and send requests
+ self._serialize = serializer  # serializer for URL/query/header/body values
+ self._deserialize = deserializer  # deserializer for response models
+ self._config = config  # client configuration (subscription_id, polling_interval, ...)
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ # Pop 'cls' so it is not forwarded into the pipeline kwargs below.
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ # Default ARM status-code -> exception mapping, extendable via 'error_map'.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ # A custom 'cls' also receives the raw pipeline response.
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.Workspace"]
+ """Initial PUT request of the create_or_update long-running operation.
+
+ Returns the deserialized Workspace for 200/201 responses; returns None for
+ 202 (operation accepted, body not yet available). Raises HttpResponseError
+ (or a mapped typed exception) for any other status code.
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ # Allow the caller to override the request content type.
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # Serialize the Workspace model as the PUT body.
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ # 202 leaves 'deserialized' as None; the poller retrieves the final body.
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.Workspace"]
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ # With a continuation token the initial PUT is skipped and the poller is
+ # rebuilt from saved state below.
+ if cont_token is None:
+ # cls=lambda x,y,z: x makes the initial call return the raw
+ # PipelineResponse, which the polling method needs.
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ # Drop request-only kwargs so they are not replayed on polling requests.
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ # Deserializes the final polling response into a Workspace (or cls result).
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ # Passed to ARMPolling so any templated polling URLs can be formatted.
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Initial DELETE request of the begin_delete long-running operation.
+
+ Accepts 200/202/204 as success (204 = workspace already absent); any other
+ status raises HttpResponseError or a mapped typed exception.
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-01-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ # No body on success; only invoke a custom 'cls' hook if provided.
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ # With a continuation token the initial DELETE is skipped and the poller
+ # is rebuilt from saved state below.
+ if cont_token is None:
+ # cls=lambda x,y,z: x returns the raw PipelineResponse for the poller.
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ # Drop request-only kwargs so they are not replayed on polling requests.
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ # Delete yields no body: returns None unless a custom 'cls' is supplied.
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ # Passed to ARMPolling so any templated polling URLs can be formatted.
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
    def update(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        parameters,  # type: "models.WorkspaceUpdateParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.Workspace"
        """Updates a machine learning workspace with the specified parameters.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param parameters: The parameters for updating a machine learning workspace.
        :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Workspace, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.Workspace
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Workspace"]
        # Map well-known failure status codes to specific azure-core exceptions;
        # callers can extend or override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the operation's metadata template plus path arguments.
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and issue the PATCH through the client pipeline.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Workspace', pipeline_response)

        if cls:
            # Give the caller-supplied callback the raw response and the model.
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'}  # type: ignore
+
    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.WorkspaceListResult"]
        """Lists all the available machine learning workspaces under the specified resource group.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request: first page uses the metadata URL template,
            # subsequent pages reuse the service-provided next_link verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, page items).
            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page through the pipeline, raising on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
+
    def list_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ListWorkspaceKeysResult"
        """Lists all the keys associated with this workspace. This includes keys for the storage account,
        app insights and password for container registry.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListWorkspaceKeysResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListWorkspaceKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # listKeys is a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'}  # type: ignore
+
    def _resync_keys_initial(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Initial request of the resync-keys long-running operation.

        Issues the POST to the resyncKeys action and returns once the service
        accepts it (200 or 202); polling is handled by ``begin_resync_keys``.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._resync_keys_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 202 means the operation was accepted and will complete asynchronously.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'}  # type: ignore
+
    def begin_resync_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Resync all the keys associated with this workspace. This includes keys for the storage account,
        app insights and password for container registry.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: make the initial request now. The lambda keeps the
            # raw pipeline response so the poller can read the LRO headers.
            raw_result = self._resync_keys_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call; the polling method
        # must not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Operation returns no body; only invoke the optional callback.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'}  # type: ignore
+
    def list_by_subscription(
        self,
        skip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.WorkspaceListResult"]
        """Lists all the available machine learning workspaces under the specified subscription.

        :param skip: Continuation token for pagination.
        :type skip: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page is built from the metadata URL; follow-up pages use the
            # next_link URL returned by the service as-is.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_subscription.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (continuation token, iterator of items).
            deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
+
    def list_notebook_access_token(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.NotebookAccessTokenResult"
        """return notebook access token and refresh token.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NotebookAccessTokenResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.NotebookAccessTokenResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_notebook_access_token.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST action with no request body; token material comes back in the response.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'}  # type: ignore
+
    def _prepare_notebook_initial(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["models.NotebookResourceInfo"]
        """Initial request of the prepare-notebook long-running operation.

        Issues the POST to the prepareNotebook action; returns the deserialized
        NotebookResourceInfo on an immediate 200, or None on 202 (operation still
        in progress — ``begin_prepare_notebook`` handles the polling).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.NotebookResourceInfo"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._prepare_notebook_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Only a 200 carries a body; a 202 leaves deserialized as None.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _prepare_notebook_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'}  # type: ignore
+
    def begin_prepare_notebook(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["models.NotebookResourceInfo"]
        """prepare_notebook.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.NotebookResourceInfo"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: start the LRO now; the lambda preserves the raw
            # pipeline response the poller needs for its status headers.
            raw_result = self._prepare_notebook_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # Already consumed by the initial request; must not reach the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final response once polling completes.
            deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }

        # final-state-via=location: the final resource is fetched from the
        # Location header rather than the original URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_prepare_notebook.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'}  # type: ignore
+
    def list_storage_account_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ListStorageAccountKeysResult"
        """list_storage_account_keys.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListStorageAccountKeysResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListStorageAccountKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_storage_account_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_storage_account_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'}  # type: ignore
+
    def list_notebook_keys(
        self,
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.ListNotebookKeysResult"
        """list_notebook_keys.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListNotebookKeysResult, or the result of cls(response)
        :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListNotebookKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.list_notebook_keys.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_notebook_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'}  # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md
new file mode 100644
index 00000000000..4658d655bf9
--- /dev/null
+++ b/src/machinelearningservices/report.md
@@ -0,0 +1,2539 @@
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az machinelearningservices|[groups](#CommandGroups)|
+
+## GROUPS
+### Command groups in `az machinelearningservices` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az machinelearningservices workspace|Workspaces|[commands](#CommandsInWorkspaces)|
+|az machinelearningservices usage|Usages|[commands](#CommandsInUsages)|
+|az machinelearningservices virtual-machine-size|VirtualMachineSizes|[commands](#CommandsInVirtualMachineSizes)|
+|az machinelearningservices quota|Quotas|[commands](#CommandsInQuotas)|
+|az machinelearningservices compute|Compute|[commands](#CommandsInCompute)|
+|az machinelearningservices private-endpoint-connection|PrivateEndpointConnections|[commands](#CommandsInPrivateEndpointConnections)|
+|az machinelearningservices private-link-resource|PrivateLinkResources|[commands](#CommandsInPrivateLinkResources)|
+|az machinelearningservices workspace-connection|WorkspaceConnections|[commands](#CommandsInWorkspaceConnections)|
+|az machinelearningservices batch-endpoint|BatchEndpoints|[commands](#CommandsInBatchEndpoints)|
+|az machinelearningservices batch-deployment|BatchDeployments|[commands](#CommandsInBatchDeployments)|
+|az machinelearningservices code-container|CodeContainers|[commands](#CommandsInCodeContainers)|
+|az machinelearningservices code-version|CodeVersions|[commands](#CommandsInCodeVersions)|
+|az machinelearningservices data-container|DataContainers|[commands](#CommandsInDataContainers)|
+|az machinelearningservices data-version|DataVersions|[commands](#CommandsInDataVersions)|
+|az machinelearningservices datastore|Datastores|[commands](#CommandsInDatastores)|
+|az machinelearningservices environment-container|EnvironmentContainers|[commands](#CommandsInEnvironmentContainers)|
+|az machinelearningservices environment-specification-version|EnvironmentSpecificationVersions|[commands](#CommandsInEnvironmentSpecificationVersions)|
+|az machinelearningservices job|Jobs|[commands](#CommandsInJobs)|
+|az machinelearningservices labeling-job|LabelingJobs|[commands](#CommandsInLabelingJobs)|
+|az machinelearningservices model-container|ModelContainers|[commands](#CommandsInModelContainers)|
+|az machinelearningservices model-version|ModelVersions|[commands](#CommandsInModelVersions)|
+|az machinelearningservices online-endpoint|OnlineEndpoints|[commands](#CommandsInOnlineEndpoints)|
+|az machinelearningservices online-deployment|OnlineDeployments|[commands](#CommandsInOnlineDeployments)|
+|az machinelearningservices workspace-feature|WorkspaceFeatures|[commands](#CommandsInWorkspaceFeatures)|
+|az machinelearningservices workspace-sku|WorkspaceSkus|[commands](#CommandsInWorkspaceSkus)|
+
+## COMMANDS
+### Commands in `az machinelearningservices batch-deployment` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices batch-deployment list](#BatchDeploymentsList)|List|[Parameters](#ParametersBatchDeploymentsList)|[Example](#ExamplesBatchDeploymentsList)|
+|[az machinelearningservices batch-deployment show](#BatchDeploymentsGet)|Get|[Parameters](#ParametersBatchDeploymentsGet)|[Example](#ExamplesBatchDeploymentsGet)|
+|[az machinelearningservices batch-deployment create](#BatchDeploymentsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersBatchDeploymentsCreateOrUpdate#Create)|[Example](#ExamplesBatchDeploymentsCreateOrUpdate#Create)|
+|[az machinelearningservices batch-deployment update](#BatchDeploymentsUpdate)|Update|[Parameters](#ParametersBatchDeploymentsUpdate)|[Example](#ExamplesBatchDeploymentsUpdate)|
+|[az machinelearningservices batch-deployment delete](#BatchDeploymentsDelete)|Delete|[Parameters](#ParametersBatchDeploymentsDelete)|[Example](#ExamplesBatchDeploymentsDelete)|
+
+### Commands in `az machinelearningservices batch-endpoint` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices batch-endpoint list](#BatchEndpointsList)|List|[Parameters](#ParametersBatchEndpointsList)|[Example](#ExamplesBatchEndpointsList)|
+|[az machinelearningservices batch-endpoint show](#BatchEndpointsGet)|Get|[Parameters](#ParametersBatchEndpointsGet)|[Example](#ExamplesBatchEndpointsGet)|
+|[az machinelearningservices batch-endpoint create](#BatchEndpointsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersBatchEndpointsCreateOrUpdate#Create)|[Example](#ExamplesBatchEndpointsCreateOrUpdate#Create)|
+|[az machinelearningservices batch-endpoint update](#BatchEndpointsUpdate)|Update|[Parameters](#ParametersBatchEndpointsUpdate)|[Example](#ExamplesBatchEndpointsUpdate)|
+|[az machinelearningservices batch-endpoint delete](#BatchEndpointsDelete)|Delete|[Parameters](#ParametersBatchEndpointsDelete)|[Example](#ExamplesBatchEndpointsDelete)|
+|[az machinelearningservices batch-endpoint list-key](#BatchEndpointsListKeys)|ListKeys|[Parameters](#ParametersBatchEndpointsListKeys)|[Example](#ExamplesBatchEndpointsListKeys)|
+
+### Commands in `az machinelearningservices code-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices code-container list](#CodeContainersList)|List|[Parameters](#ParametersCodeContainersList)|[Example](#ExamplesCodeContainersList)|
+|[az machinelearningservices code-container show](#CodeContainersGet)|Get|[Parameters](#ParametersCodeContainersGet)|[Example](#ExamplesCodeContainersGet)|
+|[az machinelearningservices code-container create](#CodeContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersCodeContainersCreateOrUpdate#Create)|[Example](#ExamplesCodeContainersCreateOrUpdate#Create)|
+|[az machinelearningservices code-container update](#CodeContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersCodeContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices code-container delete](#CodeContainersDelete)|Delete|[Parameters](#ParametersCodeContainersDelete)|[Example](#ExamplesCodeContainersDelete)|
+
+### Commands in `az machinelearningservices code-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices code-version list](#CodeVersionsList)|List|[Parameters](#ParametersCodeVersionsList)|[Example](#ExamplesCodeVersionsList)|
+|[az machinelearningservices code-version show](#CodeVersionsGet)|Get|[Parameters](#ParametersCodeVersionsGet)|[Example](#ExamplesCodeVersionsGet)|
+|[az machinelearningservices code-version create](#CodeVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersCodeVersionsCreateOrUpdate#Create)|[Example](#ExamplesCodeVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices code-version update](#CodeVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersCodeVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices code-version delete](#CodeVersionsDelete)|Delete|[Parameters](#ParametersCodeVersionsDelete)|[Example](#ExamplesCodeVersionsDelete)|
+
+### Commands in `az machinelearningservices compute` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices compute list](#ComputeList)|List|[Parameters](#ParametersComputeList)|[Example](#ExamplesComputeList)|
+|[az machinelearningservices compute show](#ComputeGet)|Get|[Parameters](#ParametersComputeGet)|[Example](#ExamplesComputeGet)|
+|[az machinelearningservices compute create](#ComputeCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersComputeCreateOrUpdate#Create)|[Example](#ExamplesComputeCreateOrUpdate#Create)|
+|[az machinelearningservices compute update](#ComputeUpdate)|Update|[Parameters](#ParametersComputeUpdate)|[Example](#ExamplesComputeUpdate)|
+|[az machinelearningservices compute delete](#ComputeDelete)|Delete|[Parameters](#ParametersComputeDelete)|[Example](#ExamplesComputeDelete)|
+|[az machinelearningservices compute list-key](#ComputeListKeys)|ListKeys|[Parameters](#ParametersComputeListKeys)|[Example](#ExamplesComputeListKeys)|
+|[az machinelearningservices compute list-node](#ComputeListNodes)|ListNodes|[Parameters](#ParametersComputeListNodes)|[Example](#ExamplesComputeListNodes)|
+|[az machinelearningservices compute restart](#ComputeRestart)|Restart|[Parameters](#ParametersComputeRestart)|[Example](#ExamplesComputeRestart)|
+|[az machinelearningservices compute start](#ComputeStart)|Start|[Parameters](#ParametersComputeStart)|[Example](#ExamplesComputeStart)|
+|[az machinelearningservices compute stop](#ComputeStop)|Stop|[Parameters](#ParametersComputeStop)|[Example](#ExamplesComputeStop)|
+|[az machinelearningservices compute update-schedule](#ComputeUpdateSchedules)|UpdateSchedules|[Parameters](#ParametersComputeUpdateSchedules)|[Example](#ExamplesComputeUpdateSchedules)|
+
+### Commands in `az machinelearningservices data-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices data-container list](#DataContainersList)|List|[Parameters](#ParametersDataContainersList)|[Example](#ExamplesDataContainersList)|
+|[az machinelearningservices data-container show](#DataContainersGet)|Get|[Parameters](#ParametersDataContainersGet)|[Example](#ExamplesDataContainersGet)|
+|[az machinelearningservices data-container create](#DataContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataContainersCreateOrUpdate#Create)|[Example](#ExamplesDataContainersCreateOrUpdate#Create)|
+|[az machinelearningservices data-container update](#DataContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices data-container delete](#DataContainersDelete)|Delete|[Parameters](#ParametersDataContainersDelete)|[Example](#ExamplesDataContainersDelete)|
+
+### Commands in `az machinelearningservices data-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices data-version list](#DataVersionsList)|List|[Parameters](#ParametersDataVersionsList)|[Example](#ExamplesDataVersionsList)|
+|[az machinelearningservices data-version show](#DataVersionsGet)|Get|[Parameters](#ParametersDataVersionsGet)|[Example](#ExamplesDataVersionsGet)|
+|[az machinelearningservices data-version create](#DataVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataVersionsCreateOrUpdate#Create)|[Example](#ExamplesDataVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices data-version update](#DataVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices data-version delete](#DataVersionsDelete)|Delete|[Parameters](#ParametersDataVersionsDelete)|[Example](#ExamplesDataVersionsDelete)|
+
+### Commands in `az machinelearningservices datastore` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices datastore list](#DatastoresList)|List|[Parameters](#ParametersDatastoresList)|[Example](#ExamplesDatastoresList)|
+|[az machinelearningservices datastore show](#DatastoresGet)|Get|[Parameters](#ParametersDatastoresGet)|[Example](#ExamplesDatastoresGet)|
+|[az machinelearningservices datastore create](#DatastoresCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatastoresCreateOrUpdate#Create)|[Example](#ExamplesDatastoresCreateOrUpdate#Create)|
+|[az machinelearningservices datastore update](#DatastoresCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatastoresCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices datastore delete](#DatastoresDelete)|Delete|[Parameters](#ParametersDatastoresDelete)|[Example](#ExamplesDatastoresDelete)|
+|[az machinelearningservices datastore list-secret](#DatastoresListSecrets)|ListSecrets|[Parameters](#ParametersDatastoresListSecrets)|[Example](#ExamplesDatastoresListSecrets)|
+
+### Commands in `az machinelearningservices environment-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices environment-container list](#EnvironmentContainersList)|List|[Parameters](#ParametersEnvironmentContainersList)|[Example](#ExamplesEnvironmentContainersList)|
+|[az machinelearningservices environment-container show](#EnvironmentContainersGet)|Get|[Parameters](#ParametersEnvironmentContainersGet)|[Example](#ExamplesEnvironmentContainersGet)|
+|[az machinelearningservices environment-container create](#EnvironmentContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersEnvironmentContainersCreateOrUpdate#Create)|[Example](#ExamplesEnvironmentContainersCreateOrUpdate#Create)|
+|[az machinelearningservices environment-container update](#EnvironmentContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersEnvironmentContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices environment-container delete](#EnvironmentContainersDelete)|Delete|[Parameters](#ParametersEnvironmentContainersDelete)|[Example](#ExamplesEnvironmentContainersDelete)|
+
+### Commands in `az machinelearningservices environment-specification-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices environment-specification-version list](#EnvironmentSpecificationVersionsList)|List|[Parameters](#ParametersEnvironmentSpecificationVersionsList)|[Example](#ExamplesEnvironmentSpecificationVersionsList)|
+|[az machinelearningservices environment-specification-version show](#EnvironmentSpecificationVersionsGet)|Get|[Parameters](#ParametersEnvironmentSpecificationVersionsGet)|[Example](#ExamplesEnvironmentSpecificationVersionsGet)|
+|[az machinelearningservices environment-specification-version create](#EnvironmentSpecificationVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersEnvironmentSpecificationVersionsCreateOrUpdate#Create)|[Example](#ExamplesEnvironmentSpecificationVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices environment-specification-version update](#EnvironmentSpecificationVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersEnvironmentSpecificationVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices environment-specification-version delete](#EnvironmentSpecificationVersionsDelete)|Delete|[Parameters](#ParametersEnvironmentSpecificationVersionsDelete)|[Example](#ExamplesEnvironmentSpecificationVersionsDelete)|
+
+### Commands in `az machinelearningservices job` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices job list](#JobsList)|List|[Parameters](#ParametersJobsList)|[Example](#ExamplesJobsList)|
+|[az machinelearningservices job show](#JobsGet)|Get|[Parameters](#ParametersJobsGet)|[Example](#ExamplesJobsGet)|
+|[az machinelearningservices job create](#JobsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersJobsCreateOrUpdate#Create)|[Example](#ExamplesJobsCreateOrUpdate#Create)|
+|[az machinelearningservices job update](#JobsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersJobsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices job delete](#JobsDelete)|Delete|[Parameters](#ParametersJobsDelete)|[Example](#ExamplesJobsDelete)|
+|[az machinelearningservices job cancel](#JobsCancel)|Cancel|[Parameters](#ParametersJobsCancel)|[Example](#ExamplesJobsCancel)|
+
+### Commands in `az machinelearningservices labeling-job` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices labeling-job list](#LabelingJobsList)|List|[Parameters](#ParametersLabelingJobsList)|[Example](#ExamplesLabelingJobsList)|
+|[az machinelearningservices labeling-job show](#LabelingJobsGet)|Get|[Parameters](#ParametersLabelingJobsGet)|[Example](#ExamplesLabelingJobsGet)|
+|[az machinelearningservices labeling-job create](#LabelingJobsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLabelingJobsCreateOrUpdate#Create)|[Example](#ExamplesLabelingJobsCreateOrUpdate#Create)|
+|[az machinelearningservices labeling-job update](#LabelingJobsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLabelingJobsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices labeling-job delete](#LabelingJobsDelete)|Delete|[Parameters](#ParametersLabelingJobsDelete)|[Example](#ExamplesLabelingJobsDelete)|
+|[az machinelearningservices labeling-job export-label](#LabelingJobsExportLabels)|ExportLabels|[Parameters](#ParametersLabelingJobsExportLabels)|[Example](#ExamplesLabelingJobsExportLabels)|
+|[az machinelearningservices labeling-job pause](#LabelingJobsPause)|Pause|[Parameters](#ParametersLabelingJobsPause)|[Example](#ExamplesLabelingJobsPause)|
+|[az machinelearningservices labeling-job resume](#LabelingJobsResume)|Resume|[Parameters](#ParametersLabelingJobsResume)|[Example](#ExamplesLabelingJobsResume)|
+
+### Commands in `az machinelearningservices model-container` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices model-container list](#ModelContainersList)|List|[Parameters](#ParametersModelContainersList)|[Example](#ExamplesModelContainersList)|
+|[az machinelearningservices model-container show](#ModelContainersGet)|Get|[Parameters](#ParametersModelContainersGet)|[Example](#ExamplesModelContainersGet)|
+|[az machinelearningservices model-container create](#ModelContainersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersModelContainersCreateOrUpdate#Create)|[Example](#ExamplesModelContainersCreateOrUpdate#Create)|
+|[az machinelearningservices model-container update](#ModelContainersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersModelContainersCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices model-container delete](#ModelContainersDelete)|Delete|[Parameters](#ParametersModelContainersDelete)|[Example](#ExamplesModelContainersDelete)|
+
+### Commands in `az machinelearningservices model-version` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices model-version list](#ModelVersionsList)|List|[Parameters](#ParametersModelVersionsList)|[Example](#ExamplesModelVersionsList)|
+|[az machinelearningservices model-version show](#ModelVersionsGet)|Get|[Parameters](#ParametersModelVersionsGet)|[Example](#ExamplesModelVersionsGet)|
+|[az machinelearningservices model-version create](#ModelVersionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersModelVersionsCreateOrUpdate#Create)|[Example](#ExamplesModelVersionsCreateOrUpdate#Create)|
+|[az machinelearningservices model-version update](#ModelVersionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersModelVersionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices model-version delete](#ModelVersionsDelete)|Delete|[Parameters](#ParametersModelVersionsDelete)|[Example](#ExamplesModelVersionsDelete)|
+
+### Commands in `az machinelearningservices online-deployment` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices online-deployment list](#OnlineDeploymentsList)|List|[Parameters](#ParametersOnlineDeploymentsList)|[Example](#ExamplesOnlineDeploymentsList)|
+|[az machinelearningservices online-deployment show](#OnlineDeploymentsGet)|Get|[Parameters](#ParametersOnlineDeploymentsGet)|[Example](#ExamplesOnlineDeploymentsGet)|
+|[az machinelearningservices online-deployment create](#OnlineDeploymentsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersOnlineDeploymentsCreateOrUpdate#Create)|[Example](#ExamplesOnlineDeploymentsCreateOrUpdate#Create)|
+|[az machinelearningservices online-deployment update](#OnlineDeploymentsUpdate)|Update|[Parameters](#ParametersOnlineDeploymentsUpdate)|[Example](#ExamplesOnlineDeploymentsUpdate)|
+|[az machinelearningservices online-deployment delete](#OnlineDeploymentsDelete)|Delete|[Parameters](#ParametersOnlineDeploymentsDelete)|[Example](#ExamplesOnlineDeploymentsDelete)|
+|[az machinelearningservices online-deployment get-log](#OnlineDeploymentsGetLogs)|GetLogs|[Parameters](#ParametersOnlineDeploymentsGetLogs)|[Example](#ExamplesOnlineDeploymentsGetLogs)|
+
+### Commands in `az machinelearningservices online-endpoint` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices online-endpoint list](#OnlineEndpointsList)|List|[Parameters](#ParametersOnlineEndpointsList)|[Example](#ExamplesOnlineEndpointsList)|
+|[az machinelearningservices online-endpoint show](#OnlineEndpointsGet)|Get|[Parameters](#ParametersOnlineEndpointsGet)|[Example](#ExamplesOnlineEndpointsGet)|
+|[az machinelearningservices online-endpoint create](#OnlineEndpointsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersOnlineEndpointsCreateOrUpdate#Create)|[Example](#ExamplesOnlineEndpointsCreateOrUpdate#Create)|
+|[az machinelearningservices online-endpoint update](#OnlineEndpointsUpdate)|Update|[Parameters](#ParametersOnlineEndpointsUpdate)|[Example](#ExamplesOnlineEndpointsUpdate)|
+|[az machinelearningservices online-endpoint delete](#OnlineEndpointsDelete)|Delete|[Parameters](#ParametersOnlineEndpointsDelete)|[Example](#ExamplesOnlineEndpointsDelete)|
+|[az machinelearningservices online-endpoint get-token](#OnlineEndpointsGetToken)|GetToken|[Parameters](#ParametersOnlineEndpointsGetToken)|[Example](#ExamplesOnlineEndpointsGetToken)|
+|[az machinelearningservices online-endpoint list-key](#OnlineEndpointsListKeys)|ListKeys|[Parameters](#ParametersOnlineEndpointsListKeys)|[Example](#ExamplesOnlineEndpointsListKeys)|
+|[az machinelearningservices online-endpoint regenerate-key](#OnlineEndpointsRegenerateKeys)|RegenerateKeys|[Parameters](#ParametersOnlineEndpointsRegenerateKeys)|[Example](#ExamplesOnlineEndpointsRegenerateKeys)|
+
+### Commands in `az machinelearningservices private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-endpoint-connection list](#PrivateEndpointConnectionsList)|List|[Parameters](#ParametersPrivateEndpointConnectionsList)|[Example](#ExamplesPrivateEndpointConnectionsList)|
+|[az machinelearningservices private-endpoint-connection show](#PrivateEndpointConnectionsGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionsGet)|[Example](#ExamplesPrivateEndpointConnectionsGet)|
+|[az machinelearningservices private-endpoint-connection create](#PrivateEndpointConnectionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionsCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionsCreateOrUpdate#Create)|
+|[az machinelearningservices private-endpoint-connection update](#PrivateEndpointConnectionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices private-endpoint-connection delete](#PrivateEndpointConnectionsDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionsDelete)|[Example](#ExamplesPrivateEndpointConnectionsDelete)|
+
+### Commands in `az machinelearningservices private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-link-resource list](#PrivateLinkResourcesList)|List|[Parameters](#ParametersPrivateLinkResourcesList)|[Example](#ExamplesPrivateLinkResourcesList)|
+
+### Commands in `az machinelearningservices quota` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices quota list](#QuotasList)|List|[Parameters](#ParametersQuotasList)|[Example](#ExamplesQuotasList)|
+|[az machinelearningservices quota update](#QuotasUpdate)|Update|[Parameters](#ParametersQuotasUpdate)|[Example](#ExamplesQuotasUpdate)|
+
+### Commands in `az machinelearningservices usage` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)|
+
+### Commands in `az machinelearningservices virtual-machine-size` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices virtual-machine-size list](#VirtualMachineSizesList)|List|[Parameters](#ParametersVirtualMachineSizesList)|[Example](#ExamplesVirtualMachineSizesList)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)|
+|[az machinelearningservices workspace list](#WorkspacesListBySubscription)|ListBySubscription|[Parameters](#ParametersWorkspacesListBySubscription)|[Example](#ExamplesWorkspacesListBySubscription)|
+|[az machinelearningservices workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)|
+|[az machinelearningservices workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)|
+|[az machinelearningservices workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)|
+|[az machinelearningservices workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)|
+|[az machinelearningservices workspace list-key](#WorkspacesListKeys)|ListKeys|[Parameters](#ParametersWorkspacesListKeys)|[Example](#ExamplesWorkspacesListKeys)|
+|[az machinelearningservices workspace list-notebook-access-token](#WorkspacesListNotebookAccessToken)|ListNotebookAccessToken|[Parameters](#ParametersWorkspacesListNotebookAccessToken)|[Example](#ExamplesWorkspacesListNotebookAccessToken)|
+|[az machinelearningservices workspace list-notebook-key](#WorkspacesListNotebookKeys)|ListNotebookKeys|[Parameters](#ParametersWorkspacesListNotebookKeys)|[Example](#ExamplesWorkspacesListNotebookKeys)|
+|[az machinelearningservices workspace list-storage-account-key](#WorkspacesListStorageAccountKeys)|ListStorageAccountKeys|[Parameters](#ParametersWorkspacesListStorageAccountKeys)|[Example](#ExamplesWorkspacesListStorageAccountKeys)|
+|[az machinelearningservices workspace prepare-notebook](#WorkspacesPrepareNotebook)|PrepareNotebook|[Parameters](#ParametersWorkspacesPrepareNotebook)|[Example](#ExamplesWorkspacesPrepareNotebook)|
+|[az machinelearningservices workspace resync-key](#WorkspacesResyncKeys)|ResyncKeys|[Parameters](#ParametersWorkspacesResyncKeys)|[Example](#ExamplesWorkspacesResyncKeys)|
+
+### Commands in `az machinelearningservices workspace-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-connection list](#WorkspaceConnectionsList)|List|[Parameters](#ParametersWorkspaceConnectionsList)|[Example](#ExamplesWorkspaceConnectionsList)|
+|[az machinelearningservices workspace-connection show](#WorkspaceConnectionsGet)|Get|[Parameters](#ParametersWorkspaceConnectionsGet)|[Example](#ExamplesWorkspaceConnectionsGet)|
+|[az machinelearningservices workspace-connection create](#WorkspaceConnectionsCreate)|Create|[Parameters](#ParametersWorkspaceConnectionsCreate)|[Example](#ExamplesWorkspaceConnectionsCreate)|
+|[az machinelearningservices workspace-connection delete](#WorkspaceConnectionsDelete)|Delete|[Parameters](#ParametersWorkspaceConnectionsDelete)|[Example](#ExamplesWorkspaceConnectionsDelete)|
+
+### Commands in `az machinelearningservices workspace-feature` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-feature list](#WorkspaceFeaturesList)|List|[Parameters](#ParametersWorkspaceFeaturesList)|[Example](#ExamplesWorkspaceFeaturesList)|
+
+### Commands in `az machinelearningservices workspace-sku` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-sku list](#WorkspaceSkusList)|List|[Parameters](#ParametersWorkspaceSkusList)|[Example](#ExamplesWorkspaceSkusList)|
+
+
+## COMMAND DETAILS
+
+### group `az machinelearningservices batch-deployment`
+#### Command `az machinelearningservices batch-deployment list`
+
+##### Example
+```
+az machinelearningservices batch-deployment list --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Endpoint name|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering expression applied to the returned list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of entries to return in the list.|top|$top|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices batch-deployment show`
+
+##### Example
+```
+az machinelearningservices batch-deployment show --deployment-name "testBatchDeployment" --endpoint-name \
+"testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Endpoint name|endpoint_name|endpointName|
+|**--deployment-name**|string|The identifier for the Batch deployments.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices batch-deployment create`
+
+##### Example
+```
+az machinelearningservices batch-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" codeConfiguration={"codeId":"/subscriptions/00000000-111\
+1-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testw\
+orkspace/codes/testcode/versions/1","scoringScript":"score.py"} compute={"instanceCount":0,"instanceType":"string","isL\
+ocal":false,"location":"string","properties":{"additionalProp1":"string","additionalProp2":"string","additionalProp3":"\
+string"},"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Micr\
+osoft.MachineLearningServices/workspaces/testworkspace/computes/testcompute"} environmentId="/subscriptions/00000000-11\
+11-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/test\
+workspace/environments/myenv" environmentVariables={"additionalProp1":"string","additionalProp2":"string","additionalPr\
+op3":"string"} errorThreshold=0 loggingLevel="Info" miniBatchSize=0 model={"assetId":"/subscriptions/00000000-1111-2222\
+-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspa\
+ce/models/testmodel/versions/1","referenceType":"Id"} outputConfiguration={"appendRowFileName":"string","outputAction":\
+"SummaryOnly"} partitionKeys="string" properties={"additionalProp1":"string","additionalProp2":"string","additionalProp\
+3":"string"} retrySettings={"maxRetries":0,"timeout":"string"} --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testBatchDeployment" --endpoint-name \
+"testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name|endpoint_name|endpointName|
+|**--deployment-name**|string|The identifier for the Batch inference deployment.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--code-configuration**|object|Code configuration for the endpoint deployment.|code_configuration|codeConfiguration|
+|**--description**|string|Description of the endpoint deployment.|description|description|
+|**--environment-id**|string|ARM resource ID of the environment specification for the endpoint deployment.|environment_id|environmentId|
+|**--environment-variables**|dictionary|Environment variables configuration for the deployment.|environment_variables|environmentVariables|
+|**--error-threshold**|integer|Error threshold, if the error count for the entire input goes above this value, the batch inference will be aborted. Range is [-1, int.MaxValue]. For FileDataset, this value is the count of file failures. For TabularDataset, this value is the count of record failures. If set to -1 (the lower bound), all failures during batch inference will be ignored.|error_threshold|errorThreshold|
+|**--logging-level**|choice|Logging level for batch inference operation.|logging_level|loggingLevel|
+|**--mini-batch-size**|integer|Size of the mini-batch passed to each batch invocation. For FileDataset, this is the number of files per mini-batch. For TabularDataset, this is the size of the records in bytes, per mini-batch.|mini_batch_size|miniBatchSize|
+|**--data-path-asset-reference**|object|Reference to an asset via its path in a datastore.|data_path_asset_reference|DataPathAssetReference|
+|**--id-asset-reference**|object|Reference to an asset via its ARM resource ID.|id_asset_reference|IdAssetReference|
+|**--output-path-asset-reference**|object|Reference to an asset via its path in a job output.|output_path_asset_reference|OutputPathAssetReference|
+|**--output-configuration**|object|Output configuration for the batch inference operation.|output_configuration|outputConfiguration|
+|**--partition-keys**|array|Partition keys list used for named partitioning.|partition_keys|partitionKeys|
+|**--properties**|dictionary|Property dictionary. Properties can be added, but not removed or altered.|properties|properties|
+|**--retry-settings**|object|Retry Settings for the batch inference operation.|retry_settings|retrySettings|
+|**--instance-count**|integer|Number of instances or nodes.|instance_count|instanceCount|
+|**--instance-type**|string|SKU type to run on.|instance_type|instanceType|
+|**--is-local**|boolean|Set to true for jobs running on local compute.|is_local|isLocal|
+|**--compute-configuration-location**|string|Location for virtual cluster run.|compute_configuration_location|location|
+|**--compute-configuration-properties**|dictionary|Additional properties.|compute_configuration_properties|properties|
+|**--target**|string|ARM resource ID of the compute resource.|target|target|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices batch-deployment update`
+
+##### Example
+```
+az machinelearningservices batch-deployment update --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --deployment-name "testBatchDeployment" --endpoint-name "testBatchEndpoint" --resource-group \
+"testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name|endpoint_name|endpointName|
+|**--deployment-name**|string|The identifier for the Batch inference deployment.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--description**|string|Description of the endpoint deployment.|description|description|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices batch-deployment delete`
+
+##### Example
+```
+az machinelearningservices batch-deployment delete --deployment-name "testBatchDeployment" --endpoint-name \
+"testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Endpoint name|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference deployment identifier.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices batch-endpoint`
+#### Command `az machinelearningservices batch-endpoint list`
+
+##### Example
+```
+az machinelearningservices batch-endpoint list --count 1 --resource-group "resourceGroup-1234" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--count**|integer|Number of endpoints to be retrieved in a page of results.|count|count|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices batch-endpoint show`
+
+##### Example
+```
+az machinelearningservices batch-endpoint show --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Name for the Batch Endpoint.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices batch-endpoint create`
+
+##### Example
+```
+az machinelearningservices batch-endpoint create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","seconda\
+ryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+traffic={"myDeployment1":0,"myDeployment2":1} --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testBatchEndpoint" --resource-group "resourceGroup-1234" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Name for the Batch inference endpoint.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--auth-mode**|choice|Enum to determine endpoint authentication mode.|auth_mode|authMode|
+|**--description**|string|Description of the inference endpoint.|description|description|
+|**--keys**|object|EndpointAuthKeys to set initially on an Endpoint. This property will always be returned as null. AuthKey values must be retrieved using the ListKeys API.|keys|keys|
+|**--properties**|dictionary|Property dictionary. Properties can be added, but not removed or altered.|properties|properties|
+|**--traffic**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic|traffic|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices batch-endpoint update`
+
+##### Example
+```
+az machinelearningservices batch-endpoint update --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testBatchEndpoint" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Name for the Batch inference endpoint.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--traffic**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic|traffic|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices batch-endpoint delete`
+
+##### Example
+```
+az machinelearningservices batch-endpoint delete --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices batch-endpoint list-key`
+
+##### Example
+```
+az machinelearningservices batch-endpoint list-key --endpoint-name "testBatchEndpoint" --resource-group \
+"resourceGroup-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices code-container`
+#### Command `az machinelearningservices code-container list`
+
+##### Example
+```
+az machinelearningservices code-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices code-container show`
+
+##### Example
+```
+az machinelearningservices code-container show --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices code-container create`
+
+##### Example
+```
+az machinelearningservices code-container create --name "testContainer" --properties description="string" \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices code-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices code-container delete`
+
+##### Example
+```
+az machinelearningservices code-container delete --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices code-version`
+#### Command `az machinelearningservices code-version list`
+
+##### Example
+```
+az machinelearningservices code-version list --name "testContainer" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices code-version show`
+
+##### Example
+```
+az machinelearningservices code-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices code-version create`
+
+##### Example
+```
+az machinelearningservices code-version create --name "testContainer" --properties path="path/to/file.py" \
+description="string" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234\
+/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" isAnonymous=true \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--is-anonymous**|boolean|If the name and version are system-generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices code-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--is-anonymous**|boolean|If the name and version are system-generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices code-version delete`
+
+##### Example
+```
+az machinelearningservices code-version delete --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices compute`
+#### Command `az machinelearningservices compute list`
+
+##### Example
+```
+az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices compute show`
+
+##### Example
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute create`
+
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osT\
+ype\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"\
+minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0\
+0000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery\
+/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\
+\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"schedules\\":{\\"\
+computeStartStop\\":[{\\"action\\":\\"Stop\\",\\"cron\\":{\\"expression\\":\\"0 18 * * *\\",\\"startTime\\":\\"2021-04-\
+23T01:30:00\\",\\"timeZone\\":\\"Pacific Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Cron\\"}]},\
+\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STA\
+NDARD_NC6\\"}}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--properties**|object|Compute properties|properties|properties|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices compute update`
+
+##### Example
+```
+az machinelearningservices compute update --name "compute123" --scale-settings max-node-count=4 min-node-count=4 \
+node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--scale-settings**|object|Desired scale settings for the amlCompute.|scale_settings|scaleSettings|
+
+#### Command `az machinelearningservices compute delete`
+
+##### Example
+```
+az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction|
+
+#### Command `az machinelearningservices compute list-key`
+
+##### Example
+```
+az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute list-node`
+
+##### Example
+```
+az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute restart`
+
+##### Example
+```
+az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute start`
+
+##### Example
+```
+az machinelearningservices compute start --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute stop`
+
+##### Example
+```
+az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute update-schedule`
+
+##### Example
+```
+az machinelearningservices compute update-schedule --name "compute123" --compute-start-stop \
+"[{\\"action\\":\\"Start\\",\\"recurrence\\":{\\"frequency\\":\\"Day\\",\\"interval\\":1,\\"schedule\\":{\\"hours\\":[1\
+8],\\"minutes\\":[30],\\"weekDays\\":null},\\"startTime\\":\\"2021-04-23T01:30:00\\",\\"timeZone\\":\\"Pacific \
+Standard Time\\"},\\"status\\":\\"Enabled\\",\\"triggerType\\":\\"Recurrence\\"}]" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--compute-start-stop**|array|The list of compute start stop schedules to be applied.|compute_start_stop|computeStartStop|
+
+### group `az machinelearningservices data-container`
+#### Command `az machinelearningservices data-container list`
+
+##### Example
+```
+az machinelearningservices data-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices data-container show`
+
+##### Example
+```
+az machinelearningservices data-container show --name "datacontainer123" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices data-container create`
+
+##### Example
+```
+az machinelearningservices data-container create --name "datacontainer123" --properties description="string" \
+properties={"properties1":"value1","properties2":"value2"} tags={"tag1":"value1","tag2":"value2"} --resource-group \
+"testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices data-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices data-container delete`
+
+##### Example
+```
+az machinelearningservices data-container delete --name "datacontainer123" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices data-version`
+#### Command `az machinelearningservices data-version list`
+
+##### Example
+```
+az machinelearningservices data-version list --name "dataset123" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Data name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--tags**|string|Comma-separated list of tag names (and optionally values). Example: tag1,tag2=value2|tags|$tags|
+
+#### Command `az machinelearningservices data-version show`
+
+##### Example
+```
+az machinelearningservices data-version show --name "dataset123" --resource-group "testrg123" --version "1" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices data-version create`
+
+##### Example
+```
+az machinelearningservices data-version create --name "dataset123" --properties path="path/to/file.csv" \
+description="string" datasetType="Simple" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGrou\
+ps/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastores/mydatastore" \
+isAnonymous=true properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--version "1" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--dataset-type**|choice|The Format of dataset.|dataset_type|datasetType|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--is-anonymous**|boolean|If the name version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices data-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--dataset-type**|choice|The Format of dataset.|dataset_type|datasetType|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--is-anonymous**|boolean|If the name version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices data-version delete`
+
+##### Example
+```
+az machinelearningservices data-version delete --name "dataset123" --resource-group "testrg123" --version "1" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices datastore`
+#### Command `az machinelearningservices datastore list`
+
+##### Example
+```
+az machinelearningservices datastore list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--count**|integer|Maximum number of results to return.|count|count|
+|**--is-default**|boolean|Filter down to the workspace default datastore.|is_default|isDefault|
+|**--names**|array|Names of datastores to return.|names|names|
+|**--search-text**|string|Text to search for in the datastore names.|search_text|searchText|
+|**--order-by**|string|Order by property (createdtime \| modifiedtime \| name).|order_by|orderBy|
+|**--order-by-asc**|boolean|Order by property in ascending order.|order_by_asc|orderByAsc|
+
+#### Command `az machinelearningservices datastore show`
+
+##### Example
+```
+az machinelearningservices datastore show --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices datastore create`
+
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzureDataLakeGen1","credentials":{"authorityUrl":"string","clientId":"00000000-1111-2222-3333\
+-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","secrets":{"clientSecret":"string","secretsT\
+ype":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"storeName":"testStore"} isDefault=true \
+linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} properties={"additionalProp1":"string\
+","additionalProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","a\
+dditionalProp3":"string"} --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"authorityUrl":"str\
+ing","clientId":"00000000-1111-2222-3333-444444444444","credentialsType":"ServicePrincipal","resourceUri":"string","sec\
+rets":{"clientSecret":"string","secretsType":"ServicePrincipal"},"tenantId":"00000000-1111-2222-3333-444444444444"},"en\
+dpoint":"core.windows.net","protocol":"https"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"str\
+ing","origin":"Synapse"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureFile","credentials":{"credentialsType":"\
+AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} \
+isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzurePostgreSql","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string","\
+secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","enableSSL":true,"endpoint":"string","portNumber":1\
+23,"serverName":"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synaps\
+e"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"contentsType":"AzureSqlDatabase","credentials":{"credentialsType":"SqlAdmin","secrets":{"password":"string",\
+"secretsType":"SqlAdmin"},"userId":"string"},"databaseName":"string","endpoint":"string","portNumber":123,"serverName":\
+"string"} isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices datastore create --name "testDatastore" --properties description="string" \
+contents={"accountName":"string","containerName":"string","contentsType":"AzureBlob","credentials":{"credentialsType":"\
+AccountKey","secrets":{"key":"string","secretsType":"AccountKey"}},"endpoint":"core.windows.net","protocol":"https"} \
+isDefault=true linkedInfo={"linkedId":"string","linkedResourceName":"string","origin":"Synapse"} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--contents**|object|Reference to the datastore storage contents.|contents|contents|
+|**--skip-validation**|boolean|Flag to skip validation.|skip_validation|skipValidation|
+|**--description**|string|The asset description text.|description|description|
+|**--is-default**|boolean|Whether this datastore is the default for the workspace.|is_default|isDefault|
+|**--linked-info**|object|Information about the datastore origin, if linked.|linked_info|linkedInfo|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices datastore update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--contents**|object|Reference to the datastore storage contents.|contents|contents|
+|**--skip-validation**|boolean|Flag to skip validation.|skip_validation|skipValidation|
+|**--description**|string|The asset description text.|description|description|
+|**--is-default**|boolean|Whether this datastore is the default for the workspace.|is_default|isDefault|
+|**--linked-info**|object|Information about the datastore origin, if linked.|linked_info|linkedInfo|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices datastore delete`
+
+##### Example
+```
+az machinelearningservices datastore delete --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices datastore list-secret`
+
+##### Example
+```
+az machinelearningservices datastore list-secret --name "testDatastore" --resource-group "testrg123" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Datastore name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices environment-container`
+#### Command `az machinelearningservices environment-container list`
+
+##### Example
+```
+az machinelearningservices environment-container list --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices environment-container show`
+
+##### Example
+```
+az machinelearningservices environment-container show --name "testEnvironment" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices environment-container create`
+
+##### Example
+```
+az machinelearningservices environment-container create --name "testEnvironment" --properties description="string" \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices environment-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices environment-container delete`
+
+##### Example
+```
+az machinelearningservices environment-container delete --name "testContainer" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices environment-specification-version`
+#### Command `az machinelearningservices environment-specification-version list`
+
+##### Example
+```
+az machinelearningservices environment-specification-version list --name "testEnvironment" --resource-group \
+"testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices environment-specification-version show`
+
+##### Example
+```
+az machinelearningservices environment-specification-version show --name "testEnvironment" --resource-group \
+"testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices environment-specification-version create`
+
+##### Example
+```
+az machinelearningservices environment-specification-version create --name "testEnvironment" --properties \
+description="string" condaFile="channels:\\n- defaults\\ndependencies:\\n- python=3.7.7\\nname: my-env" \
+docker={"dockerSpecificationType":"Build","dockerfile":"FROM myimage"} properties={"additionalProp1":"string","addition\
+alProp2":"string","additionalProp3":"string"} tags={"additionalProp1":"string","additionalProp2":"string","additionalPr\
+op3":"string"} --resource-group "testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Name of EnvironmentSpecificationVersion.|name|name|
+|**--version**|string|Version of EnvironmentSpecificationVersion.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--conda-file**|string|Standard configuration file used by Conda that lets you install any kind of package, including Python, R, and C/C++ packages. |conda_file|condaFile|
+|**--description**|string|The asset description text.|description|description|
+|**--docker-build**|object|Class to represent configuration settings for Docker Build|docker_build|DockerBuild|
+|**--docker-image**|object|Class to represent configuration settings for Docker Image|docker_image|DockerImage|
+|**--is-anonymous**|boolean|If the name version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--liveness-route**|object|The route to check the liveness of the inference server container.|liveness_route|livenessRoute|
+|**--readiness-route**|object|The route to check the readiness of the inference server container.|readiness_route|readinessRoute|
+|**--scoring-route**|object|The port to send the scoring requests to, within the inference server container.|scoring_route|scoringRoute|
+
+#### Command `az machinelearningservices environment-specification-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Name of EnvironmentSpecificationVersion.|name|name|
+|**--version**|string|Version of EnvironmentSpecificationVersion.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--conda-file**|string|Standard configuration file used by Conda that lets you install any kind of package, including Python, R, and C/C++ packages. |conda_file|condaFile|
+|**--description**|string|The asset description text.|description|description|
+|**--docker-build**|object|Class to represent configuration settings for Docker Build|docker_build|DockerBuild|
+|**--docker-image**|object|Class to represent configuration settings for Docker Image|docker_image|DockerImage|
+|**--is-anonymous**|boolean|If the name version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--liveness-route**|object|The route to check the liveness of the inference server container.|liveness_route|livenessRoute|
+|**--readiness-route**|object|The route to check the readiness of the inference server container.|readiness_route|readinessRoute|
+|**--scoring-route**|object|The route to send the scoring requests to, within the inference server container.|scoring_route|scoringRoute|
+
+#### Command `az machinelearningservices environment-specification-version delete`
+
+##### Example
+```
+az machinelearningservices environment-specification-version delete --name "testContainer" --resource-group \
+"testrg123" --version "1" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices job`
+#### Command `az machinelearningservices job list`
+
+##### Example
+```
+az machinelearningservices job list --job-type "Command" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices job list --job-type "Sweep" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--job-type**|string|Type of job to be returned.|job_type|jobType|
+|**--tags**|string|Tags for job to be returned.|tags|tags|
+|**--tag**|string|Jobs returned will have this tag key.|tag|tag|
+
+#### Command `az machinelearningservices job show`
+
+##### Example
+```
+az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices job show --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices job create`
+
+##### Example
+```
+az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"codeId\\":\\"/subscriptions/0000\
+0000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspac\
+es/testworkspace/codes/mycode/versions/1\\",\\"command\\":\\"python file.py test\\",\\"compute\\":{\\"instanceCount\\":\
+1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Micro\
+soft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"},\\"distribution\\":{\\"distributionType\\"\
+:\\"PyTorch\\",\\"processCount\\":2},\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourc\
+eGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/AzureML-Tu\
+torial/versions/1\\",\\"environmentVariables\\":{\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"expe\
+rimentName\\":\\"myExperiment\\",\\"identity\\":{\\"identityType\\":\\"AMLToken\\"},\\"inputDataBindings\\":{\\"test\\"\
+:{\\"dataId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Micro\
+soft.MachineLearningServices/workspaces/testworkspace/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/compu\
+te\\"}},\\"jobType\\":\\"Command\\",\\"outputDataBindings\\":{\\"test\\":{\\"datastoreId\\":\\"/subscriptions/00000000-\
+1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/te\
+stworkspace/datastore/mydatastore\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"properties\\":{\\"additionalProp1\\\
+":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"tags\\":{\\"additionalProp1\\"\
+:\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"timeout\\":\\"PT1M\\"}" --id \
+"testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Example
+```
+az machinelearningservices job create --properties "{\\"description\\":\\"string\\",\\"algorithm\\":\\"Grid\\",\\"compu\
+te\\":{\\"instanceCount\\":1,\\"target\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourc\
+eGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mycompute\\"},\\"identity\\":\
+{\\"identityType\\":\\"AMLToken\\"},\\"jobType\\":\\"Sweep\\",\\"maxConcurrentTrials\\":1,\\"maxTotalTrials\\":1,\\"obj\
+ective\\":{\\"goal\\":\\"Minimize\\",\\"primaryMetric\\":\\"string\\"},\\"properties\\":{\\"additionalProp1\\":\\"strin\
+g\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"searchSpace\\":{\\"name\\":{}},\\"tags\\\
+":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\\"additionalProp3\\":\\"string\\"},\\"timeout\
+\\":\\"PT1M\\",\\"trial\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resource\
+Group-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/mycode/versions/1\\",\\"command\\\
+":\\"python file.py test\\",\\"distribution\\":{\\"distributionType\\":\\"PyTorch\\",\\"processCount\\":2},\\"environme\
+ntId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.Ma\
+chineLearningServices/workspaces/testworkspace/environments/AzureML-Tutorial/versions/1\\",\\"environmentVariables\\":{\
+\\"MY_ENV_VAR1\\":\\"string\\",\\"MY_ENV_VAR2\\":\\"string\\"},\\"inputDataBindings\\":{\\"test\\":{\\"dataId\\":\\"/su\
+bscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningS\
+ervices/workspaces/testworkspace/data/mydataset/versions/1\\",\\"pathOnCompute\\":\\"path/on/compute\\"}},\\"outputData\
+Bindings\\":{\\"test\\":{\\"datastoreId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resour\
+ceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/datastore/mydatastore\\",\\"pathOnCom\
+pute\\":\\"path/on/compute\\"}},\\"timeout\\":\\"PT1M\\"}}" --id "testJob" --resource-group "testrg123" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|object|Additional attributes of the entity.|properties|properties|
+
+#### Command `az machinelearningservices job update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--properties**|object|Additional attributes of the entity.|properties|properties|
+
+#### Command `az machinelearningservices job delete`
+
+##### Example
+```
+az machinelearningservices job delete --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices job cancel`
+
+##### Example
+```
+az machinelearningservices job cancel --id "testJob" --resource-group "testrg123" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the Job.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices labeling-job`
+#### Command `az machinelearningservices labeling-job list`
+
+##### Example
+```
+az machinelearningservices labeling-job list --count "10" --resource-group "workspace-1234" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--count**|integer|Number of labeling jobs to return.|count|count|
+
+#### Command `az machinelearningservices labeling-job show`
+
+##### Example
+```
+az machinelearningservices labeling-job show --id "testLabelingJob" --include-job-instructions true \
+--include-label-categories true --resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--include-job-instructions**|boolean|Boolean value to indicate whether to include JobInstructions in response.|include_job_instructions|includeJobInstructions|
+|**--include-label-categories**|boolean|Boolean value to indicate whether to include LabelCategories in response.|include_label_categories|includeLabelCategories|
+
+#### Command `az machinelearningservices labeling-job create`
+
+##### Example
+```
+az machinelearningservices labeling-job create --properties description="string" datasetConfiguration={"assetName":"myA\
+sset","datasetVersion":"1","incrementalDatasetRefreshEnabled":true} jobInstructions={"uri":"link/to/instructions"} \
+jobType="Labeling" labelCategories={"myCategory1":{"allowMultiSelect":true,"classes":{"myLabelClass1":{"displayName":"m\
+yLabelClass1","subclasses":{}},"myLabelClass2":{"displayName":"myLabelClass2","subclasses":{}}},"displayName":"myCatego\
+ry1Title"},"myCategory2":{"allowMultiSelect":true,"classes":{"myLabelClass1":{"displayName":"myLabelClass1","subclasses\
+":{}},"myLabelClass2":{"displayName":"myLabelClass2","subclasses":{}}},"displayName":"myCategory2Title"}} \
+labelingJobMediaProperties={"mediaType":"Image"} mlAssistConfiguration={"inferencingComputeBinding":{"instanceCount":1,\
+"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.Mac\
+hineLearningServices/workspaces/testworkspace/computes/myscoringcompute"},"mlAssistEnabled":true,"trainingComputeBindin\
+g":{"instanceCount":1,"target":"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/p\
+roviders/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mytrainingcompute"}} \
+properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+tags={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} --id "testLabelingJob" \
+--resource-group "workspace-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--job-type**|choice|Specifies the type of job. This field should always be set to "Labeling".|job_type|jobType|
+|**--dataset-configuration**|object|Configuration of dataset used in the job.|dataset_configuration|datasetConfiguration|
+|**--description**|string|The asset description text.|description|description|
+|**--label-categories**|dictionary|Label categories of the job.|label_categories|labelCategories|
+|**--labeling-job-image-properties**|object|Properties of a labeling job for image data|labeling_job_image_properties|LabelingJobImageProperties|
+|**--labeling-job-text-properties**|object|Properties of a labeling job for text data|labeling_job_text_properties|LabelingJobTextProperties|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--inferencing-compute-binding**|object|AML compute binding used in inferencing.|inferencing_compute_binding|inferencingComputeBinding|
+|**--ml-assist-enabled**|boolean|Indicates whether MLAssist feature is enabled.|ml_assist_enabled|mlAssistEnabled|
+|**--training-compute-binding**|object|AML compute binding used in training.|training_compute_binding|trainingComputeBinding|
+|**--uri**|string|The link to a page with detailed labeling instructions for labelers.|uri|uri|
+
+#### Command `az machinelearningservices labeling-job update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--job-type**|choice|Specifies the type of job. This field should always be set to "Labeling".|job_type|jobType|
+|**--dataset-configuration**|object|Configuration of dataset used in the job.|dataset_configuration|datasetConfiguration|
+|**--description**|string|The asset description text.|description|description|
+|**--label-categories**|dictionary|Label categories of the job.|label_categories|labelCategories|
+|**--labeling-job-image-properties**|object|Properties of a labeling job for image data|labeling_job_image_properties|LabelingJobImageProperties|
+|**--labeling-job-text-properties**|object|Properties of a labeling job for text data|labeling_job_text_properties|LabelingJobTextProperties|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+|**--inferencing-compute-binding**|object|AML compute binding used in inferencing.|inferencing_compute_binding|inferencingComputeBinding|
+|**--ml-assist-enabled**|boolean|Indicates whether MLAssist feature is enabled.|ml_assist_enabled|mlAssistEnabled|
+|**--training-compute-binding**|object|AML compute binding used in training.|training_compute_binding|trainingComputeBinding|
+|**--uri**|string|The link to a page with detailed labeling instructions for labelers.|uri|uri|
+
+#### Command `az machinelearningservices labeling-job delete`
+
+##### Example
+```
+az machinelearningservices labeling-job delete --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices labeling-job export-label`
+
+##### Example
+```
+az machinelearningservices labeling-job export-label --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--coco-export-summary**|object||coco_export_summary|CocoExportSummary|
+|**--csv-export-summary**|object||csv_export_summary|CsvExportSummary|
+|**--dataset-export-summary**|object||dataset_export_summary|DatasetExportSummary|
+
+#### Command `az machinelearningservices labeling-job pause`
+
+##### Example
+```
+az machinelearningservices labeling-job pause --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices labeling-job resume`
+
+##### Example
+```
+az machinelearningservices labeling-job resume --id "testLabelingJob" --resource-group "workspace-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--id**|string|The name and identifier for the LabelingJob.|id|id|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices model-container`
+#### Command `az machinelearningservices model-container list`
+
+##### Example
+```
+az machinelearningservices model-container list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--count**|integer|Maximum number of results to return.|count|count|
+
+#### Command `az machinelearningservices model-container show`
+
+##### Example
+```
+az machinelearningservices model-container show --name "testContainer" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices model-container create`
+
+##### Example
+```
+az machinelearningservices model-container create --name "testContainer" --properties description="Model container \
+description" tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices model-container update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--description**|string|The asset description text.|description|description|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices model-container delete`
+
+##### Example
+```
+az machinelearningservices model-container delete --name "testContainer" --resource-group "testrg123" --workspace-name \
+"workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices model-version`
+#### Command `az machinelearningservices model-version list`
+
+##### Example
+```
+az machinelearningservices model-version list --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Model name.|name|name|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Maximum number of records to return.|top|$top|
+|**--version**|string|Model version.|version|version|
+|**--description**|string|Model description.|description|description|
+|**--offset**|integer|Number of initial results to skip.|offset|offset|
+|**--tags**|string|Comma-separated list of tag names (and optionally values). Example: tag1,tag2=value2|tags|tags|
+|**--properties**|string|Comma-separated list of property names (and optionally values). Example: prop1,prop2=value2|properties|properties|
+
+#### Command `az machinelearningservices model-version show`
+
+##### Example
+```
+az machinelearningservices model-version show --name "testContainer" --resource-group "testrg123" --version "1" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices model-version create`
+
+##### Example
+```
+az machinelearningservices model-version create --name "testContainer" --properties path="path/in/datastore" \
+description="Model version description" datastoreId="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups\
+/testrg123/providers/Microsoft.MachineLearningServices/workspaces/workspace123/datastores/datastore123" \
+flavors={"python_function":{"data":{"loader_module":"myLoaderModule"}}} properties={"prop1":"value1","prop2":"value2"} \
+tags={"tag1":"value1","tag2":"value2"} --resource-group "testrg123" --version "1" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--flavors**|dictionary|Mapping of model flavors to their properties.|flavors|flavors|
+|**--is-anonymous**|boolean|If the name and version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices model-version update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--path**|string|The path of the file/directory in the datastore.|path|path|
+|**--datastore-id**|string|ARM resource ID of the datastore where the asset is located.|datastore_id|datastoreId|
+|**--description**|string|The asset description text.|description|description|
+|**--flavors**|dictionary|Mapping of model flavors to their properties.|flavors|flavors|
+|**--is-anonymous**|boolean|If the name and version are system generated (anonymous registration).|is_anonymous|isAnonymous|
+|**--properties**|dictionary|The asset property dictionary.|properties|properties|
+|**--tags**|dictionary|Tag dictionary. Tags can be added, removed, and updated.|tags|tags|
+
+#### Command `az machinelearningservices model-version delete`
+
+##### Example
+```
+az machinelearningservices model-version delete --name "testContainer" --resource-group "testrg123" --version "999" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--name**|string|Container name.|name|name|
+|**--version**|string|Version identifier.|version|version|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices online-deployment`
+#### Command `az machinelearningservices online-deployment list`
+
+##### Example
+```
+az machinelearningservices online-deployment list --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--order-by**|string|Ordering of list.|order_by|$orderBy|
+|**--top**|integer|Top of list.|top|$top|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices online-deployment show`
+
+##### Example
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Example
+```
+az machinelearningservices online-deployment show --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-deployment create`
+
+##### Example
+```
+az machinelearningservices online-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfigu\
+ration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/provid\
+ers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"strin\
+g\\"},\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memoryInGBLimit\\":64},\
+\\"endpointComputeType\\":\\"K8S\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resource\
+Groups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/env123\\",\
+\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshol\
+d\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/res\
+ourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/model123\\",\
+\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"string\\",\
+\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurrentReque\
+stsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"pollingInter\
+val\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Example
+```
+az machinelearningservices online-deployment create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties "{\\"description\\":\\"string\\",\\"appInsightsEnabled\\":true,\\"codeConfigu\
+ration\\":{\\"codeId\\":\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/provid\
+ers/Microsoft.MachineLearningServices/workspaces/testworkspace/codes/code123/versions/1\\",\\"scoringScript\\":\\"strin\
+g\\"},\\"endpointComputeType\\":\\"Managed\\",\\"environmentId\\":\\"/subscriptions/00000000-1111-2222-3333-44444444444\
+4/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/environments/e\
+nv123\\",\\"livenessProbe\\":{\\"failureThreshold\\":50,\\"initialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"succes\
+sThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"model\\":{\\"assetId\\":\\"/subscriptions/00000000-1111-2222-3333-4444444\
+44444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/models/mod\
+el123\\",\\"referenceType\\":\\"Id\\"},\\"properties\\":{\\"additionalProp1\\":\\"string\\",\\"additionalProp2\\":\\"st\
+ring\\",\\"additionalProp3\\":\\"string\\"},\\"provisioningState\\":\\"Creating\\",\\"requestSettings\\":{\\"maxConcurr\
+entRequestsPerInstance\\":5,\\"maxQueueWait\\":\\"PT1M\\",\\"requestTimeout\\":\\"PT1M\\"},\\"scaleSettings\\":{\\"poll\
+ingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\",\\"targetUtilizationPercentage\\":50}}" --tags \
+additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" \
+--endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|The geo-location where the resource lives|location|location|
+|**--properties**|object|Additional attributes of the entity.|properties|properties|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-deployment update`
+
+##### Example
+```
+az machinelearningservices online-deployment update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --properties "{\\"containerResourceRequirements\\":{\\"cpu\\":4,\\"cpuLimit\\":4,\\"memoryInGB\\":64,\\"memory\
+InGBLimit\\":64},\\"endpointComputeType\\":\\"K8S\\",\\"scaleSettings\\":{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\
+\\":\\"Auto\\"}}" --tags additionalProp1="string" additionalProp2="string" additionalProp3="string" --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Example
+```
+az machinelearningservices online-deployment update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --properties "{\\"endpointComputeType\\":\\"Managed\\",\\"readinessProbe\\":{\\"failureThreshold\\":50,\\"init\
+ialDelay\\":\\"PT1M\\",\\"period\\":\\"PT1M\\",\\"successThreshold\\":50,\\"timeout\\":\\"PT1M\\"},\\"scaleSettings\\":\
+{\\"pollingInterval\\":\\"PT1M\\",\\"scaleType\\":\\"Auto\\"}}" --tags additionalProp1="string" \
+additionalProp2="string" additionalProp3="string" --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--properties**|object|Additional attributes of the entity.|properties|properties|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-deployment delete`
+
+##### Example
+```
+az machinelearningservices online-deployment delete --deployment-name "testDeployment" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-deployment get-log`
+
+##### Example
+```
+az machinelearningservices online-deployment get-log --container-type "StorageInitializer" --tail 0 --deployment-name \
+"testDeployment" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Inference endpoint name.|endpoint_name|endpointName|
+|**--deployment-name**|string|Inference Endpoint Deployment name.|deployment_name|deploymentName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--container-type**|choice|The type of container to retrieve logs from.|container_type|containerType|
+|**--tail**|integer|The maximum number of lines to tail.|tail|tail|
+
+### group `az machinelearningservices online-endpoint`
+#### Command `az machinelearningservices online-endpoint list`
+
+##### Example
+```
+az machinelearningservices online-endpoint list --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--name**|string|Name of the endpoint.|name|name|
+|**--count**|integer|Number of endpoints to be retrieved in a page of results.|count|count|
+|**--compute-type**|choice|EndpointComputeType to be filtered by.|compute_type|computeType|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--tags**|string|A set of tags with which to filter the returned models. It is a comma separated string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 .|tags|tags|
+|**--properties**|string|A set of properties with which to filter the returned models. It is a comma separated string of properties key and/or properties key=value Example: propKey1,propKey2,propKey3=value3 .|properties|properties|
+|**--order-by**|choice|The option to order the response.|order_by|orderBy|
+
+#### Command `az machinelearningservices online-endpoint show`
+
+##### Example
+```
+az machinelearningservices online-endpoint show --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint create`
+
+##### Example
+```
+az machinelearningservices online-endpoint create --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --location "string" --properties description="string" authMode="AMLToken" keys={"primaryKey":"string","seconda\
+ryKey":"string"} properties={"additionalProp1":"string","additionalProp2":"string","additionalProp3":"string"} \
+target="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.Machi\
+neLearningServices/workspaces/testworkspace/computes/compute123" traffic={"myDeployment1":0,"myDeployment2":1} --tags \
+additionalProp1="string" additionalProp2="string" additionalProp3="string" --endpoint-name "testEndpoint" \
+--resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|The geo-location where the resource lives|location|location|
+|**--auth-mode**|choice|Inference endpoint authentication mode type|auth_mode|authMode|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--description**|string|Description of the inference endpoint.|description|description|
+|**--keys**|object|EndpointAuthKeys to set initially on an Endpoint. This property will always be returned as null. AuthKey values must be retrieved using the ListKeys API.|keys|keys|
+|**--properties**|dictionary|Property dictionary. Properties can be added, but not removed or altered.|properties|properties|
+|**--target**|string|ARM resource ID of the compute if it exists. Optional.|target|target|
+|**--traffic**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic|traffic|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-endpoint update`
+
+##### Example
+```
+az machinelearningservices online-endpoint update --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.ManagedI\
+dentity/userAssignedIdentities/myuseridentity\\":{\\"clientId\\":\\"string\\",\\"principalId\\":\\"string\\"}}" --kind \
+"string" --traffic myDeployment1=0 myDeployment2=1 --tags additionalProp1="string" additionalProp2="string" \
+additionalProp3="string" --endpoint-name "testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--kind**|string|Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type.|kind|kind|
+|**--location**|string|The geo-location where the resource lives.|location|location|
+|**--tags**|dictionary|Resource tags.|tags|tags|
+|**--traffic**|dictionary|Traffic rules on how the traffic will be routed across deployments.|traffic|traffic|
+|**--type**|choice|Defines values for a ResourceIdentity's type.|type|type|
+|**--user-assigned-identities**|dictionary|Dictionary of the user assigned identities, key is ARM resource ID of the UAI.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices online-endpoint delete`
+
+##### Example
+```
+az machinelearningservices online-endpoint delete --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint get-token`
+
+##### Example
+```
+az machinelearningservices online-endpoint get-token --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint list-key`
+
+##### Example
+```
+az machinelearningservices online-endpoint list-key --endpoint-name "testEndpoint" --resource-group "testrg123" \
+--workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices online-endpoint regenerate-key`
+
+##### Example
+```
+az machinelearningservices online-endpoint regenerate-key --key-type "Primary" --key-value "string" --endpoint-name \
+"testEndpoint" --resource-group "testrg123" --workspace-name "workspace123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--endpoint-name**|string|Online Endpoint name.|endpoint_name|endpointName|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--key-type**|choice|Specification for which type of key to generate. Primary or Secondary.|key_type|keyType|
+|**--key-value**|string|The value the key is set to.|key_value|keyValue|
+
+### group `az machinelearningservices private-endpoint-connection`
+#### Command `az machinelearningservices private-endpoint-connection list`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices private-endpoint-connection show`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" --resource-group \
+"rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection create`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices private-endpoint-connection update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices private-endpoint-connection delete`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+### group `az machinelearningservices private-link-resource`
+#### Command `az machinelearningservices private-link-resource list`
+
+##### Example
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices quota`
+#### Command `az machinelearningservices quota list`
+
+##### Example
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+#### Command `az machinelearningservices quota update`
+
+##### Example
+```
+az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/\
+quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningSe\
+rvices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" --value \
+type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/reso\
+urceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluste\
+r_Dedicated_vCPUs" limit=200 unit="Count"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota update is requested.|location|location|
+|**--value**|array|The list for update quota.|value|value|
+|**--quota-update-parameters-location**|string|Region of workspace quota to be updated.|quota_update_parameters_location|location|
+
+### group `az machinelearningservices usage`
+#### Command `az machinelearningservices usage list`
+
+##### Example
+```
+az machinelearningservices usage list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### group `az machinelearningservices virtual-machine-size`
+#### Command `az machinelearningservices virtual-machine-size list`
+
+##### Example
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location upon which virtual-machine-sizes is queried.|location|location|
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+#### Command `az machinelearningservices workspace show`
+
+##### Example
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace create`
+
+##### Example
+```
+az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable|friendly_name|friendlyName|
+|**--key-vault**|string|ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created|key_vault|keyVault|
+|**--application-insights**|string|ARM id of the application insights associated with this workspace. This cannot be changed once the workspace has been created|application_insights|applicationInsights|
+|**--container-registry**|string|ARM id of the container registry associated with this workspace. This cannot be changed once the workspace has been created|container_registry|containerRegistry|
+|**--storage-account**|string|ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created|storage_account|storageAccount|
+|**--discovery-url**|string|Url for the discovery service to identify regional endpoints for machine learning experimentation services|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--collections-throughput**|integer|The throughput of the collections in cosmosdb database|collections_throughput|collectionsThroughput|
+|**--status**|choice|Indicates whether or not the encryption is enabled for the workspace.|status|status|
+|**--identity**|object|The identity that will be used to access the key vault for encryption at rest.|identity|identity|
+|**--key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices workspace update`
+
+##### Example
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+--resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--collections-throughput**|integer|The throughput of the collections in cosmosdb database|collections_throughput|collectionsThroughput|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices workspace delete`
+
+##### Example
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-key`
+
+##### Example
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-notebook-access-token`
+
+##### Example
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" --name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-notebook-key`
+
+##### Example
+```
+az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-storage-account-key`
+
+##### Example
+```
+az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace prepare-notebook`
+
+##### Example
+```
+az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace resync-key`
+
+##### Example
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-connection`
+#### Command `az machinelearningservices workspace-connection list`
+
+##### Example
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target \
+"www.facebook.com" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--category**|string|Category of the workspace connection.|category|category|
+
+#### Command `az machinelearningservices workspace-connection show`
+
+##### Example
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+#### Command `az machinelearningservices workspace-connection create`
+
+##### Example
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type "PAT" --category \
+"ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+|**--category**|string|Category of the workspace connection.|category|category|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType|
+|**--value**|string|Value details of the workspace connection.|value|value|
+
+#### Command `az machinelearningservices workspace-connection delete`
+
+##### Example
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+### group `az machinelearningservices workspace-feature`
+#### Command `az machinelearningservices workspace-feature list`
+
+##### Example
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-sku`
+#### Command `az machinelearningservices workspace-sku list`
+
+##### Example
+```
+az machinelearningservices workspace-sku list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg
new file mode 100644
index 00000000000..2fdd96e5d39
--- /dev/null
+++ b/src/machinelearningservices/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py
new file mode 100644
index 00000000000..e4ec7166802
--- /dev/null
+++ b/src/machinelearningservices/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open  # NOTE(review): shadows builtin open; presumably kept for legacy encoding support — confirm
+from setuptools import setup, find_packages
+
+# Default version (tracked in HISTORY.rst); overridden by manual/version.py below when present.
+VERSION = '0.1.0'
+try:
+    from azext_machinelearningservices.manual.version import VERSION
+except ImportError:
+    pass  # no manual version override present; keep the default above
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+    'Development Status :: 4 - Beta',
+    'Intended Audience :: Developers',
+    'Intended Audience :: System Administrators',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
+    'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []  # default: no extra runtime dependencies; overridden below when present
+
+try:
+    from azext_machinelearningservices.manual.dependency import DEPENDENCIES
+except ImportError:
+    pass  # no manual dependency override present; keep the empty default
+
+with open('README.md', 'r', encoding='utf-8') as f:  # README + HISTORY form the long description
+    README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+    HISTORY = f.read()
+
+setup(
+    name='machinelearningservices',
+    version=VERSION,
+    description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+    author='Microsoft Corporation',
+    author_email='azpycli@microsoft.com',
+    url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+    long_description=README + '\n\n' + HISTORY,
+    license='MIT',
+    classifiers=CLASSIFIERS,
+    packages=find_packages(),
+    install_requires=DEPENDENCIES,
+    package_data={'azext_machinelearningservices': ['azext_metadata.json']},  # ship extension metadata with the package
+)